[med-svn] [libsis-jhdf5-java] 01/04: Imported Upstream version 14.12.1

Olivier Sallou <osallou@debian.org>
Fri Aug 21 16:35:24 UTC 2015


This is an automated email from the git hooks/post-receive script.

osallou pushed a commit to branch master
in repository libsis-jhdf5-java.

commit dfcb2ff01b4c234fb6974cce3c181640a4fc5f64
Author: Olivier Sallou <osallou@debian.org>
Date:   Fri Aug 21 16:19:29 2015 +0000

    Imported Upstream version 14.12.1
---
 source/c/COPYING                                   |    87 +
 source/c/compile_hdf5_gcc.sh                       |    27 +
 source/c/compile_hdf5_linux_amd64.sh               |     3 +
 source/c/compile_hdf5_linux_arm.sh                 |     3 +
 source/c/compile_hdf5_linux_i386.sh                |     3 +
 source/c/compile_hdf5_macosx_i386.sh               |     3 +
 source/c/compile_hdf5_macosx_x86_64.sh             |     3 +
 source/c/compile_hdf5_sunstudio_64.sh              |    16 +
 source/c/compile_hdf5_sunstudio_sparc32.sh         |    16 +
 source/c/compile_hdf5_sunstudio_x86.sh             |    16 +
 source/c/compile_linux_amd64.sh                    |     5 +
 source/c/compile_linux_arm.sh                      |     5 +
 source/c/compile_linux_i386.sh                     |     5 +
 source/c/compile_macosx_i386.sh                    |     5 +
 source/c/compile_macosx_x86_64.sh                  |     5 +
 source/c/compile_solaris_64.sh                     |     5 +
 source/c/compile_solaris_sparc32.sh                |     5 +
 source/c/compile_solaris_x86.sh                    |     5 +
 source/c/compile_windows_i386.mak                  |   469 +
 source/c/compile_windows_x64.mak                   |   469 +
 source/c/create_win_zip.sh                         |    19 +
 source/c/gcc-4.678-optimizations-config.patch      |    33 +
 source/c/hdf-java/h5Constants.c                    |   661 ++
 source/c/hdf-java/h5Imp.c                          |   212 +
 source/c/hdf-java/h5aImp.c                         |  1456 +++
 source/c/hdf-java/h5dImp.c                         |  2075 ++++
 source/c/hdf-java/h5dImp.h                         |   348 +
 source/c/hdf-java/h5eImp.c                         |   479 +
 source/c/hdf-java/h5eImp.h                         |   142 +
 source/c/hdf-java/h5fImp.c                         |   736 ++
 source/c/hdf-java/h5fImp.h                         |   197 +
 source/c/hdf-java/h5gImp.c                         |  1336 +++
 source/c/hdf-java/h5gImp.h                         |   192 +
 source/c/hdf-java/h5iImp.c                         |   223 +
 source/c/hdf-java/h5jni.h                          |    53 +
 source/c/hdf-java/h5lImp.c                         |  1037 ++
 source/c/hdf-java/h5lImp.h                         |   153 +
 source/c/hdf-java/h5oImp.c                         |   891 ++
 source/c/hdf-java/h5oImp.h                         |   122 +
 source/c/hdf-java/h5pImp.c                         |  4972 ++++++++
 source/c/hdf-java/h5pImp.h                         |  1165 ++
 source/c/hdf-java/h5rImp.c                         |   338 +
 source/c/hdf-java/h5sImp.c                         |  1238 ++
 source/c/hdf-java/h5sImp.h                         |   239 +
 source/c/hdf-java/h5tImp.c                         |  1838 +++
 source/c/hdf-java/h5tImp.h                         |   562 +
 source/c/hdf-java/h5util.c                         |  1830 +++
 source/c/hdf-java/h5util.h                         |    41 +
 source/c/hdf-java/h5zImp.c                         |   103 +
 source/c/hdf-java/nativeData.c                     |  1254 ++
 source/c/hdf5_win_compile.diff                     |    20 +
 source/c/hdf5_win_mt.diff                          |  1659 +++
 source/c/jhdf5/exceptionImpJHDF5.c                 |   769 ++
 source/c/jhdf5/h5ConstantsJHDF5.c                  |   536 +
 source/c/jhdf5/h5ConstantsJHDF5.h                  |   505 +
 source/c/jhdf5/h5ImpJHDF5.c                        |   253 +
 source/c/jhdf5/h5aImpJHDF5.c                       |  1178 ++
 source/c/jhdf5/h5dImpJHDF5.c                       |  1507 +++
 source/c/jhdf5/h5fImpJHDF5.c                       |   556 +
 source/c/jhdf5/h5gImpJHDF5.c                       |  1087 ++
 source/c/jhdf5/h5iImpJHDF5.c                       |   188 +
 source/c/jhdf5/h5lImpJHDF5.c                       |   768 ++
 source/c/jhdf5/h5oImpJHDF5.c                       |   262 +
 source/c/jhdf5/h5pImpJHDF5.c                       |  3528 ++++++
 source/c/jhdf5/h5rImpJHDF5.c                       |   672 ++
 source/c/jhdf5/h5sImpJHDF5.c                       |  1314 +++
 source/c/jhdf5/h5tImpJHDF5.c                       |  1631 +++
 source/c/jhdf5/h5utilJHDF5.c                       |   407 +
 source/c/jhdf5/h5utilJHDF5.h                       |    30 +
 source/c/jhdf5/h5zImpJHDF5.c                       |   107 +
 source/c/jhdf5/strcpyJHDF5.c                       |   159 +
 source/c/version.sh                                |     1 +
 .../systemsx/cisd/hdf5/BitSetConversionUtils.java  |   292 +
 .../cisd/hdf5/BuildAndEnvironmentInfo.java         |    46 +
 .../ch/systemsx/cisd/hdf5/CharacterEncoding.java   |    76 +
 .../ch/systemsx/cisd/hdf5/CompoundElement.java     |    76 +
 .../java/ch/systemsx/cisd/hdf5/CompoundType.java   |    48 +
 .../cisd/hdf5/CompoundTypeInformation.java         |    96 +
 .../ch/systemsx/cisd/hdf5/EnumerationType.java     |   350 +
 source/java/ch/systemsx/cisd/hdf5/HDF5.java        |  2065 ++++
 .../cisd/hdf5/HDF5AbstractStorageFeatures.java     |   378 +
 .../java/ch/systemsx/cisd/hdf5/HDF5BaseReader.java |  1665 +++
 .../java/ch/systemsx/cisd/hdf5/HDF5BaseWriter.java |  1738 +++
 .../ch/systemsx/cisd/hdf5/HDF5BooleanReader.java   |   320 +
 .../ch/systemsx/cisd/hdf5/HDF5BooleanWriter.java   |   446 +
 .../java/ch/systemsx/cisd/hdf5/HDF5ByteReader.java |   792 ++
 .../java/ch/systemsx/cisd/hdf5/HDF5ByteWriter.java |   703 ++
 .../systemsx/cisd/hdf5/HDF5CommonInformation.java  |   158 +
 .../cisd/hdf5/HDF5CompoundByteifyerFactory.java    |   374 +
 .../systemsx/cisd/hdf5/HDF5CompoundDataList.java   |    54 +
 .../ch/systemsx/cisd/hdf5/HDF5CompoundDataMap.java |    62 +
 .../hdf5/HDF5CompoundInformationRetriever.java     |   838 ++
 .../cisd/hdf5/HDF5CompoundMappingHints.java        |   164 +
 .../HDF5CompoundMemberByteifyerBitSetFactory.java  |   293 +
 .../HDF5CompoundMemberByteifyerBooleanFactory.java |   265 +
 .../HDF5CompoundMemberByteifyerByteFactory.java    |   534 +
 .../HDF5CompoundMemberByteifyerDateFactory.java    |   297 +
 .../HDF5CompoundMemberByteifyerDoubleFactory.java  |   520 +
 ...DF5CompoundMemberByteifyerEnumArrayFactory.java |   444 +
 .../HDF5CompoundMemberByteifyerEnumFactory.java    |   423 +
 .../HDF5CompoundMemberByteifyerFloatFactory.java   |   519 +
 ...oundMemberByteifyerHDF5TimeDurationFactory.java |   311 +
 .../HDF5CompoundMemberByteifyerIntFactory.java     |   515 +
 .../HDF5CompoundMemberByteifyerLongFactory.java    |   515 +
 .../HDF5CompoundMemberByteifyerShortFactory.java   |   523 +
 .../HDF5CompoundMemberByteifyerStringFactory.java  |   415 +
 .../cisd/hdf5/HDF5CompoundMemberInformation.java   |   301 +
 .../cisd/hdf5/HDF5CompoundMemberMapping.java       |  1495 +++
 .../ch/systemsx/cisd/hdf5/HDF5CompoundReader.java  |   682 ++
 .../ch/systemsx/cisd/hdf5/HDF5CompoundType.java    |   371 +
 .../ch/systemsx/cisd/hdf5/HDF5CompoundWriter.java  |   993 ++
 .../java/ch/systemsx/cisd/hdf5/HDF5DataBlock.java  |    78 +
 .../java/ch/systemsx/cisd/hdf5/HDF5DataClass.java  |   284 +
 .../systemsx/cisd/hdf5/HDF5DataSetInformation.java |   241 +
 .../java/ch/systemsx/cisd/hdf5/HDF5DataType.java   |   189 +
 .../cisd/hdf5/HDF5DataTypeInformation.java         |   595 +
 .../ch/systemsx/cisd/hdf5/HDF5DataTypeVariant.java |   203 +
 .../ch/systemsx/cisd/hdf5/HDF5DateTimeReader.java  |   638 ++
 .../ch/systemsx/cisd/hdf5/HDF5DateTimeWriter.java  |   683 ++
 .../ch/systemsx/cisd/hdf5/HDF5DoubleReader.java    |   792 ++
 .../ch/systemsx/cisd/hdf5/HDF5DoubleWriter.java    |   702 ++
 .../java/ch/systemsx/cisd/hdf5/HDF5EnumReader.java |   795 ++
 .../java/ch/systemsx/cisd/hdf5/HDF5EnumWriter.java |   905 ++
 .../ch/systemsx/cisd/hdf5/HDF5EnumerationType.java |   234 +
 .../systemsx/cisd/hdf5/HDF5EnumerationValue.java   |   190 +
 .../cisd/hdf5/HDF5EnumerationValueArray.java       |   625 +
 .../cisd/hdf5/HDF5EnumerationValueMDArray.java     |   766 ++
 source/java/ch/systemsx/cisd/hdf5/HDF5Factory.java |   125 +
 .../ch/systemsx/cisd/hdf5/HDF5FactoryProvider.java |    94 +
 .../cisd/hdf5/HDF5FileLevelReadOnlyHandler.java    |    66 +
 .../cisd/hdf5/HDF5FileLevelReadWriteHandler.java   |    86 +
 .../ch/systemsx/cisd/hdf5/HDF5FloatReader.java     |   792 ++
 .../cisd/hdf5/HDF5FloatStorageFeatures.java        |   869 ++
 .../ch/systemsx/cisd/hdf5/HDF5FloatWriter.java     |   702 ++
 .../cisd/hdf5/HDF5GenericStorageFeatures.java      |   546 +
 .../java/ch/systemsx/cisd/hdf5/HDF5IntReader.java  |   792 ++
 .../systemsx/cisd/hdf5/HDF5IntStorageFeatures.java |  1231 ++
 .../java/ch/systemsx/cisd/hdf5/HDF5IntWriter.java  |   703 ++
 .../ch/systemsx/cisd/hdf5/HDF5LinkInformation.java |   121 +
 .../java/ch/systemsx/cisd/hdf5/HDF5LongReader.java |   792 ++
 .../java/ch/systemsx/cisd/hdf5/HDF5LongWriter.java |   703 ++
 .../ch/systemsx/cisd/hdf5/HDF5MDDataBlock.java     |    84 +
 .../ch/systemsx/cisd/hdf5/HDF5MDEnumBlock.java     |    81 +
 .../ch/systemsx/cisd/hdf5/HDF5MemberByteifyer.java |   207 +
 .../cisd/hdf5/HDF5NaturalBlock1DParameters.java    |    92 +
 .../cisd/hdf5/HDF5NaturalBlockMDParameters.java    |   121 +
 .../systemsx/cisd/hdf5/HDF5ObjectInformation.java  |   134 +
 .../HDF5ObjectReadOnlyInfoProviderHandler.java     |   519 +
 .../HDF5ObjectReadWriteInfoProviderHandler.java    |   334 +
 .../java/ch/systemsx/cisd/hdf5/HDF5ObjectType.java |   104 +
 .../ch/systemsx/cisd/hdf5/HDF5OpaqueReader.java    |   309 +
 .../java/ch/systemsx/cisd/hdf5/HDF5OpaqueType.java |    53 +
 .../ch/systemsx/cisd/hdf5/HDF5OpaqueWriter.java    |   235 +
 source/java/ch/systemsx/cisd/hdf5/HDF5Reader.java  |  2341 ++++
 .../systemsx/cisd/hdf5/HDF5ReaderConfigurator.java |    91 +
 .../ch/systemsx/cisd/hdf5/HDF5ReferenceReader.java |   651 ++
 .../ch/systemsx/cisd/hdf5/HDF5ReferenceWriter.java |   515 +
 .../ch/systemsx/cisd/hdf5/HDF5ShortReader.java     |   792 ++
 .../ch/systemsx/cisd/hdf5/HDF5ShortWriter.java     |   703 ++
 .../ch/systemsx/cisd/hdf5/HDF5StorageLayout.java   |    33 +
 .../ch/systemsx/cisd/hdf5/HDF5StringReader.java    |   677 ++
 .../ch/systemsx/cisd/hdf5/HDF5StringWriter.java    |   964 ++
 .../ch/systemsx/cisd/hdf5/HDF5TimeDuration.java    |   111 +
 .../systemsx/cisd/hdf5/HDF5TimeDurationArray.java  |   219 +
 .../cisd/hdf5/HDF5TimeDurationMDArray.java         |   577 +
 .../systemsx/cisd/hdf5/HDF5TimeDurationReader.java |   662 ++
 .../systemsx/cisd/hdf5/HDF5TimeDurationWriter.java |   625 +
 .../java/ch/systemsx/cisd/hdf5/HDF5TimeUnit.java   |   271 +
 .../systemsx/cisd/hdf5/HDF5UnsignedByteReader.java |   793 ++
 .../systemsx/cisd/hdf5/HDF5UnsignedByteWriter.java |   702 ++
 .../systemsx/cisd/hdf5/HDF5UnsignedIntReader.java  |   793 ++
 .../systemsx/cisd/hdf5/HDF5UnsignedIntWriter.java  |   702 ++
 .../systemsx/cisd/hdf5/HDF5UnsignedLongReader.java |   793 ++
 .../systemsx/cisd/hdf5/HDF5UnsignedLongWriter.java |   702 ++
 .../cisd/hdf5/HDF5UnsignedShortReader.java         |   793 ++
 .../cisd/hdf5/HDF5UnsignedShortWriter.java         |   702 ++
 source/java/ch/systemsx/cisd/hdf5/HDF5Utils.java   |   566 +
 .../cisd/hdf5/HDF5ValueObjectByteifyer.java        |   363 +
 source/java/ch/systemsx/cisd/hdf5/HDF5Writer.java  |  3110 +++++
 .../systemsx/cisd/hdf5/HDF5WriterConfigurator.java |   138 +
 .../ch/systemsx/cisd/hdf5/IHDF5BooleanReader.java  |   161 +
 .../ch/systemsx/cisd/hdf5/IHDF5BooleanWriter.java  |   337 +
 .../ch/systemsx/cisd/hdf5/IHDF5ByteReader.java     |   354 +
 .../ch/systemsx/cisd/hdf5/IHDF5ByteWriter.java     |   586 +
 .../cisd/hdf5/IHDF5CompoundBasicReader.java        |   665 ++
 .../cisd/hdf5/IHDF5CompoundBasicWriter.java        |   486 +
 .../hdf5/IHDF5CompoundInformationRetriever.java    |   750 ++
 .../ch/systemsx/cisd/hdf5/IHDF5CompoundReader.java |   438 +
 .../ch/systemsx/cisd/hdf5/IHDF5CompoundWriter.java |   644 ++
 .../ch/systemsx/cisd/hdf5/IHDF5DateTimeReader.java |   437 +
 .../ch/systemsx/cisd/hdf5/IHDF5DateTimeWriter.java |   436 +
 .../ch/systemsx/cisd/hdf5/IHDF5DoubleReader.java   |   354 +
 .../ch/systemsx/cisd/hdf5/IHDF5DoubleWriter.java   |   546 +
 .../systemsx/cisd/hdf5/IHDF5EnumBasicReader.java   |   364 +
 .../systemsx/cisd/hdf5/IHDF5EnumBasicWriter.java   |   297 +
 .../ch/systemsx/cisd/hdf5/IHDF5EnumReader.java     |   347 +
 .../systemsx/cisd/hdf5/IHDF5EnumTypeRetriever.java |   164 +
 .../systemsx/cisd/hdf5/IHDF5EnumValueCreator.java  |   456 +
 .../ch/systemsx/cisd/hdf5/IHDF5EnumWriter.java     |   361 +
 .../java/ch/systemsx/cisd/hdf5/IHDF5Factory.java   |    62 +
 .../cisd/hdf5/IHDF5FileLevelReadOnlyHandler.java   |    66 +
 .../cisd/hdf5/IHDF5FileLevelReadWriteHandler.java  |    95 +
 .../ch/systemsx/cisd/hdf5/IHDF5FloatReader.java    |   354 +
 .../ch/systemsx/cisd/hdf5/IHDF5FloatWriter.java    |   546 +
 .../java/ch/systemsx/cisd/hdf5/IHDF5IntReader.java |   354 +
 .../java/ch/systemsx/cisd/hdf5/IHDF5IntWriter.java |   586 +
 .../ch/systemsx/cisd/hdf5/IHDF5LegacyReader.java   |  3342 ++++++
 .../ch/systemsx/cisd/hdf5/IHDF5LegacyWriter.java   |  5139 +++++++++
 .../ch/systemsx/cisd/hdf5/IHDF5LongReader.java     |   354 +
 .../ch/systemsx/cisd/hdf5/IHDF5LongWriter.java     |   586 +
 .../IHDF5ObjectReadOnlyInfoProviderHandler.java    |   421 +
 .../IHDF5ObjectReadWriteInfoProviderHandler.java   |   237 +
 .../ch/systemsx/cisd/hdf5/IHDF5OpaqueReader.java   |   137 +
 .../ch/systemsx/cisd/hdf5/IHDF5OpaqueWriter.java   |   172 +
 source/java/ch/systemsx/cisd/hdf5/IHDF5Reader.java |   239 +
 .../cisd/hdf5/IHDF5ReaderConfigurator.java         |    68 +
 .../systemsx/cisd/hdf5/IHDF5ReferenceReader.java   |   419 +
 .../systemsx/cisd/hdf5/IHDF5ReferenceWriter.java   |   311 +
 .../ch/systemsx/cisd/hdf5/IHDF5ShortReader.java    |   354 +
 .../ch/systemsx/cisd/hdf5/IHDF5ShortWriter.java    |   586 +
 .../ch/systemsx/cisd/hdf5/IHDF5SimpleReader.java   |   349 +
 .../ch/systemsx/cisd/hdf5/IHDF5SimpleWriter.java   |   312 +
 .../ch/systemsx/cisd/hdf5/IHDF5StringReader.java   |   320 +
 .../ch/systemsx/cisd/hdf5/IHDF5StringWriter.java   |   593 +
 .../cisd/hdf5/IHDF5TimeDurationReader.java         |   268 +
 .../cisd/hdf5/IHDF5TimeDurationWriter.java         |   356 +
 .../cisd/hdf5/IHDF5UnsignedByteWriter.java         |   447 +
 .../systemsx/cisd/hdf5/IHDF5UnsignedIntWriter.java |   447 +
 .../cisd/hdf5/IHDF5UnsignedLongWriter.java         |   447 +
 .../cisd/hdf5/IHDF5UnsignedShortWriter.java        |   447 +
 source/java/ch/systemsx/cisd/hdf5/IHDF5Writer.java |   265 +
 .../cisd/hdf5/IHDF5WriterConfigurator.java         |   180 +
 source/java/ch/systemsx/cisd/hdf5/IndexMap.java    |    51 +
 source/java/ch/systemsx/cisd/hdf5/MatrixUtils.java |   467 +
 .../java/ch/systemsx/cisd/hdf5/PaddingUtils.java   |    72 +
 .../ch/systemsx/cisd/hdf5/ReflectionUtils.java     |   139 +
 source/java/ch/systemsx/cisd/hdf5/StringUtils.java |   221 +
 .../ch/systemsx/cisd/hdf5/UnsignedIntUtils.java    |   144 +
 .../cisd/hdf5/cleanup/CleanUpCallable.java         |    48 +
 .../cisd/hdf5/cleanup/CleanUpRegistry.java         |    90 +
 .../cisd/hdf5/cleanup/ICallableWithCleanUp.java    |    34 +
 .../cisd/hdf5/cleanup/ICleanUpRegistry.java        |    35 +
 .../ch/systemsx/cisd/hdf5/h5ar/ArchiveEntry.java   |   733 ++
 .../cisd/hdf5/h5ar/ArchiveEntryCompleteness.java   |    32 +
 .../hdf5/h5ar/ArchiveEntryExtractProcessor.java    |   284 +
 .../cisd/hdf5/h5ar/ArchiveEntryListProcessor.java  |   137 +
 .../hdf5/h5ar/ArchiveEntryVerifyProcessor.java     |   294 +
 .../systemsx/cisd/hdf5/h5ar/ArchiverException.java |    80 +
 .../cisd/hdf5/h5ar/ArchivingException.java         |    60 +
 .../systemsx/cisd/hdf5/h5ar/ArchivingStrategy.java |   390 +
 .../cisd/hdf5/h5ar/DeleteFromArchiveException.java |    46 +
 .../ch/systemsx/cisd/hdf5/h5ar/DirectoryIndex.java |   534 +
 .../cisd/hdf5/h5ar/DirectoryIndexProvider.java     |    91 +
 .../cisd/hdf5/h5ar/DirectoryIndexUpdater.java      |    99 +
 .../ch/systemsx/cisd/hdf5/h5ar/GroupCache.java     |    71 +
 .../cisd/hdf5/h5ar/HDF5ArchiveDeleter.java         |    79 +
 .../cisd/hdf5/h5ar/HDF5ArchiveTraverser.java       |   187 +
 .../cisd/hdf5/h5ar/HDF5ArchiveUpdater.java         |   723 ++
 .../ch/systemsx/cisd/hdf5/h5ar/HDF5Archiver.java   |   955 ++
 .../cisd/hdf5/h5ar/HDF5ArchiverFactory.java        |   213 +
 .../systemsx/cisd/hdf5/h5ar/HDF5ArchiverMain.java  |   634 ++
 .../cisd/hdf5/h5ar/IArchiveEntryProcessor.java     |    83 +
 .../cisd/hdf5/h5ar/IArchiveEntryVisitor.java       |    48 +
 .../systemsx/cisd/hdf5/h5ar/IDirectoryIndex.java   |   102 +
 .../cisd/hdf5/h5ar/IDirectoryIndexProvider.java    |    36 +
 .../cisd/hdf5/h5ar/IHDF5ArchiveInfoProvider.java   |   150 +
 .../cisd/hdf5/h5ar/IHDF5ArchiveReader.java         |   366 +
 .../ch/systemsx/cisd/hdf5/h5ar/IHDF5Archiver.java  |   418 +
 .../java/ch/systemsx/cisd/hdf5/h5ar/IdCache.java   |   109 +
 .../ch/systemsx/cisd/hdf5/h5ar/LinkRecord.java     |   493 +
 .../java/ch/systemsx/cisd/hdf5/h5ar/LinkStore.java |   192 +
 .../cisd/hdf5/h5ar/ListArchiveException.java       |    56 +
 .../ListArchiveTooManySymbolicLinksException.java  |    35 +
 .../ch/systemsx/cisd/hdf5/h5ar/ListParameters.java |   362 +
 .../systemsx/cisd/hdf5/h5ar/NewArchiveEntry.java   |   366 +
 .../cisd/hdf5/h5ar/UnarchivingException.java       |    67 +
 source/java/ch/systemsx/cisd/hdf5/h5ar/Utils.java  |   218 +
 .../cisd/hdf5/h5ar/VerifyArchiveException.java     |    56 +
 .../systemsx/cisd/hdf5/h5ar/VerifyParameters.java  |   179 +
 .../java/ch/systemsx/cisd/hdf5/h5ar/package.html   |    12 +
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5.java  |  3934 +++++++
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5A.java |   494 +
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5D.java |   503 +
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5F.java |   266 +
 .../java/ch/systemsx/cisd/hdf5/hdf5lib/H5GLO.java  |   444 +
 .../ch/systemsx/cisd/hdf5/hdf5lib/H5General.java   |   119 +
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5P.java |  1448 +++
 .../java/ch/systemsx/cisd/hdf5/hdf5lib/H5RI.java   |   286 +
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5S.java |   505 +
 source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5T.java |  1055 ++
 .../systemsx/cisd/hdf5/hdf5lib/HDF5Constants.java  |  1941 ++++
 .../systemsx/cisd/hdf5/hdf5lib/HDF5GroupInfo.java  |   188 +
 .../systemsx/cisd/hdf5/hdf5lib/HDFNativeData.java  |   318 +
 .../ch/systemsx/cisd/hdf5/hdf5lib/package.html     |   153 +
 .../cisd/hdf5/io/HDF5DataSetRandomAccessFile.java  |   868 ++
 .../cisd/hdf5/io/HDF5IOAdapterFactory.java         |   373 +
 source/java/ch/systemsx/cisd/hdf5/io/package.html  |    12 +
 source/java/ch/systemsx/cisd/hdf5/package.html     |    12 +
 source/java/ncsa/hdf/hdf5lib/H5.java               |  9215 +++++++++++++++
 source/java/ncsa/hdf/hdf5lib/HDF5Constants.java    |  1790 +++
 source/java/ncsa/hdf/hdf5lib/HDF5GroupInfo.java    |   170 +
 source/java/ncsa/hdf/hdf5lib/HDFArray.java         |  1093 ++
 source/java/ncsa/hdf/hdf5lib/HDFNativeData.java    |   476 +
 .../java/ncsa/hdf/hdf5lib/callbacks/Callbacks.java |    18 +
 .../ncsa/hdf/hdf5lib/callbacks/H5D_iterate_cb.java |    19 +
 .../ncsa/hdf/hdf5lib/callbacks/H5D_iterate_t.java  |     7 +
 .../ncsa/hdf/hdf5lib/callbacks/H5L_iterate_cb.java |    21 +
 .../ncsa/hdf/hdf5lib/callbacks/H5L_iterate_t.java  |     7 +
 .../ncsa/hdf/hdf5lib/callbacks/H5O_iterate_cb.java |    21 +
 .../ncsa/hdf/hdf5lib/callbacks/H5O_iterate_t.java  |     7 +
 .../hdf/hdf5lib/exceptions/HDF5AtomException.java  |    38 +
 .../hdf5lib/exceptions/HDF5AttributeException.java |    36 +
 .../hdf/hdf5lib/exceptions/HDF5BtreeException.java |    36 +
 .../exceptions/HDF5DataFiltersException.java       |    36 +
 .../exceptions/HDF5DataStorageException.java       |    37 +
 .../exceptions/HDF5DatasetInterfaceException.java  |    36 +
 .../HDF5DataspaceInterfaceException.java           |    37 +
 .../exceptions/HDF5DatatypeInterfaceException.java |    37 +
 .../ncsa/hdf/hdf5lib/exceptions/HDF5Exception.java |    63 +
 .../exceptions/HDF5ExternalFileListException.java  |    37 +
 .../exceptions/HDF5FileInterfaceException.java     |    37 +
 .../exceptions/HDF5FileNotFoundException.java      |    38 +
 .../exceptions/HDF5FunctionArgumentException.java  |    37 +
 .../exceptions/HDF5FunctionEntryExitException.java |    37 +
 .../hdf/hdf5lib/exceptions/HDF5HeapException.java  |    37 +
 .../exceptions/HDF5InternalErrorException.java     |    38 +
 .../hdf/hdf5lib/exceptions/HDF5JavaException.java  |    41 +
 .../hdf5lib/exceptions/HDF5LibraryException.java   |   256 +
 .../exceptions/HDF5LowLevelIOException.java        |    37 +
 .../exceptions/HDF5MetaDataCacheException.java     |    37 +
 .../exceptions/HDF5ObjectHeaderException.java      |    37 +
 .../HDF5PropertyListInterfaceException.java        |    37 +
 .../hdf5lib/exceptions/HDF5ReferenceException.java |    31 +
 .../HDF5ResourceUnavailableException.java          |    37 +
 .../hdf5lib/exceptions/HDF5SpaceRankMismatch.java  |    50 +
 .../exceptions/HDF5SymbolTableException.java       |    37 +
 .../hdf/hdf5lib/structs/H5AC_cache_config_t.java   |    97 +
 .../java/ncsa/hdf/hdf5lib/structs/H5A_info_t.java  |    32 +
 .../java/ncsa/hdf/hdf5lib/structs/H5G_info_t.java  |    25 +
 .../java/ncsa/hdf/hdf5lib/structs/H5L_info_t.java  |    36 +
 .../ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t.java   |    47 +
 .../java/ncsa/hdf/hdf5lib/structs/H5O_info_t.java  |    52 +
 .../ncsa/hdf/hdf5lib/structs/H5_ih_info_t.java     |    29 +
 .../systemsx/cisd/hdf5/BitSetConversionTest.java   |   128 +
 .../java/ch/systemsx/cisd/hdf5/Dir2HDF5.java       |    64 +
 .../java/ch/systemsx/cisd/hdf5/HDF52Dir.java       |    58 +
 .../cisd/hdf5/HDF5ArrayTypeFloatWriter.java        |   232 +
 .../java/ch/systemsx/cisd/hdf5/HDF5Extract.java    |    50 +
 .../java/ch/systemsx/cisd/hdf5/HDF5ReadTest.java   |   116 +
 .../ch/systemsx/cisd/hdf5/HDF5RoundtripTest.java   | 11396 +++++++++++++++++++
 .../java/ch/systemsx/cisd/hdf5/HDF5SpeedTest.java  |   110 +
 .../cisd/hdf5/HDF5TimeDurationReaderTest.java      |    61 +
 .../ch/systemsx/cisd/hdf5/HDF5TimeUnitTest.java    |    57 +
 .../java/ch/systemsx/cisd/hdf5/HDF5UtilsTest.java  |    57 +
 .../java/ch/systemsx/cisd/hdf5/HDF5WriteTest.java  |    70 +
 .../ch/systemsx/cisd/hdf5/MatrixUtilsTest.java     |    56 +
 .../ch/systemsx/cisd/hdf5/TestLowLevelHDF5.java    |    65 +
 .../systemsx/cisd/hdf5/UnsignedIntUtilsTest.java   |    81 +
 .../cisd/hdf5/h5ar/ArchivingStrategyTest.java      |   113 +
 .../cisd/hdf5/h5ar/DirectoryIndexUpdaterTest.java  |   226 +
 .../systemsx/cisd/hdf5/h5ar/HDF5ArchiverTest.java  |   949 ++
 .../java/ch/systemsx/cisd/hdf5/h5ar/UtilsTest.java |    57 +
 .../java/ch/systemsx/cisd/hdf5/h5ar/test.h5ar      |   Bin 0 -> 16104 bytes
 .../ch/systemsx/cisd/hdf5/h5ar/test_14_12_0.h5ar   |   Bin 0 -> 16104 bytes
 .../hdf5/io/HDF5DataSetRandomAccessFileTest.java   |  1035 ++
 .../cisd/hdf5/tools/HDF5CodeGenerator.java         |   297 +
 .../cisd/hdf5/tools/HDF5PrimitiveReader.java.templ |   792 ++
 .../cisd/hdf5/tools/HDF5PrimitiveWriter.java.templ |   702 ++
 .../tools/HDF5UnsignedPrimitiveReader.java.templ   |   793 ++
 .../tools/HDF5UnsignedPrimitiveWriter.java.templ   |   702 ++
 .../hdf5/tools/IHDF5PrimitiveReader.java.templ     |   354 +
 .../hdf5/tools/IHDF5PrimitiveWriter.java.templ     |   546 +
 .../tools/IHDF5UnsignedPrimitiveWriter.java.templ  |   447 +
 sourceTest/java/test/hdf5lib/TestAll.java          |    73 +
 sourceTest/java/test/hdf5lib/TestH5.java           |   230 +
 sourceTest/java/test/hdf5lib/TestH5A.java          |   809 ++
 sourceTest/java/test/hdf5lib/TestH5D.java          |   856 ++
 sourceTest/java/test/hdf5lib/TestH5Dparams.java    |   116 +
 sourceTest/java/test/hdf5lib/TestH5Dplist.java     |   196 +
 sourceTest/java/test/hdf5lib/TestH5E.java          |   394 +
 sourceTest/java/test/hdf5lib/TestH5Edefault.java   |   530 +
 sourceTest/java/test/hdf5lib/TestH5Eregister.java  |    54 +
 sourceTest/java/test/hdf5lib/TestH5F.java          |   229 +
 sourceTest/java/test/hdf5lib/TestH5Fbasic.java     |   284 +
 sourceTest/java/test/hdf5lib/TestH5Fparams.java    |   153 +
 sourceTest/java/test/hdf5lib/TestH5G.java          |   490 +
 sourceTest/java/test/hdf5lib/TestH5Gbasic.java     |   345 +
 sourceTest/java/test/hdf5lib/TestH5Giterate.java   |   117 +
 sourceTest/java/test/hdf5lib/TestH5Lbasic.java     |   352 +
 sourceTest/java/test/hdf5lib/TestH5Lcreate.java    |   799 ++
 sourceTest/java/test/hdf5lib/TestH5Lparams.java    |   208 +
 sourceTest/java/test/hdf5lib/TestH5Obasic.java     |   328 +
 sourceTest/java/test/hdf5lib/TestH5Ocopy.java      |   293 +
 sourceTest/java/test/hdf5lib/TestH5Ocreate.java    |   489 +
 sourceTest/java/test/hdf5lib/TestH5Oparams.java    |   126 +
 sourceTest/java/test/hdf5lib/TestH5P.java          |  1162 ++
 sourceTest/java/test/hdf5lib/TestH5PData.java      |   150 +
 sourceTest/java/test/hdf5lib/TestH5Pfapl.java      |  1314 +++
 sourceTest/java/test/hdf5lib/TestH5R.java          |   318 +
 sourceTest/java/test/hdf5lib/TestH5S.java          |   537 +
 sourceTest/java/test/hdf5lib/TestH5Sbasic.java     |   221 +
 sourceTest/java/test/hdf5lib/TestH5T.java          |   434 +
 sourceTest/java/test/hdf5lib/TestH5Tbasic.java     |   137 +
 sourceTest/java/test/hdf5lib/TestH5Tparams.java    |   364 +
 sourceTest/java/test/hdf5lib/TestH5Z.java          |    74 +
 sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf    |   Bin 0 -> 2928 bytes
 sourceTest/java/tests.xml                          |    12 +
 407 files changed, 196037 insertions(+)

diff --git a/source/c/COPYING b/source/c/COPYING
new file mode 100644
index 0000000..4f2260f
--- /dev/null
+++ b/source/c/COPYING
@@ -0,0 +1,87 @@
+Copyright Notice and Statement for NCSA Hierarchical Data Format (HDF)
+Java Software Library and Utilities
+
+NCSA Hierarchical Data Format (HDF) Software Library and Utilities 
+Copyright 1988-2004, the Board of Trustees of the University of Illinois.
+Copyright 2007-2009, Center for Information Sciences and Databases, ETH Zurich, Switzerland.  
+All rights reserved.
+
+Contributors to the library: National Center for Supercomputing 
+Applications (NCSA) at the University of Illinois, Lawrence 
+Livermore National Laboratory (LLNL), Sandia National Laboratories (SNL), 
+Los Alamos National Laboratory (LANL), Fortner Software, Unidata 
+Program Center (netCDF), The Independent JPEG Group (JPEG), 
+Jean-loup Gailly and Mark Adler (gzip), and Digital Equipment 
+Corporation (DEC). Macintosh support contributed by Gregory L. Guerin.
+
+The package 'glguerin':
+Copyright 1998, 1999 by Gregory L. Guerin.
+Redistribute or reuse only as described below.
+These files are from the MacBinary Toolkit for Java:
+   <http://www.amug.org/~glguerin/sw/#macbinary>
+and are redistributed by NCSA with permission of the author.
+
+This work was supported in part by a Cooperative Agreement with 
+NASA under NASA grant NAG 5-2040 and NAG NCC5-599.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted for any purpose (including commercial purposes)
+provided that the following conditions are met:
+
+1.  Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or materials provided with the distribution.
+
+3.  In addition, redistributions of modified forms of the source or binary
+    code must carry prominent notices stating that the original code was
+    changed and the date of the change.
+
+4.  All publications or advertising materials mentioning features or use of
+    this software must acknowledge that it was developed by the National
+    Center for Supercomputing Applications at the University of Illinois, and
+    credit the Contributors.
+
+5.  Neither the name of the University nor the names of the Contributors may
+    be used to endorse or promote products derived from this software without
+    specific prior written permission from the University or the Contributors.
+
+6.  THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY AND THE CONTRIBUTORS "AS IS"
+    WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED.  In no event
+    shall the University or the Contributors be liable for any damages
+    suffered by the users arising out of the use of this software, even if
+    advised of the possibility of such damage.
+
+--------------------------------------------------------------------------
+Portions of HDF5 were developed with support from the University of 
+California, Lawrence Livermore National Laboratory (UC LLNL).
+The following statement applies to those portions of the product
+and must be retained in any redistribution of source code, binaries,
+documentation, and/or accompanying materials:
+
+    This work was partially produced at the University of California,
+    Lawrence Livermore National Laboratory (UC LLNL) under contract no.
+    W-7405-ENG-48 (Contract 48) between the U.S. Department of Energy 
+    (DOE) and The Regents of the University of California (University) 
+    for the operation of UC LLNL.
+
+    DISCLAIMER:
+    This work was prepared as an account of work sponsored by an agency 
+    of the United States Government.  Neither the United States 
+    Government nor the University of California nor any of their 
+    employees, makes any warranty, express or implied, or assumes any 
+    liability or responsibility for the accuracy, completeness, or 
+    usefulness of any information, apparatus, product, or process 
+    disclosed, or represents that its use would not infringe privately-
+    owned rights.  Reference herein to any specific commercial products, 
+    process, or service by trade name, trademark, manufacturer, or 
+    otherwise, does not necessarily constitute or imply its endorsement, 
+    recommendation, or favoring by the United States Government or the 
+    University of California.  The views and opinions of authors 
+    expressed herein do not necessarily state or reflect those of the 
+    United States Government or the University of California, and shall 
+    not be used for advertising or product endorsement purposes.
+--------------------------------------------------------------------------
+
diff --git a/source/c/compile_hdf5_gcc.sh b/source/c/compile_hdf5_gcc.sh
new file mode 100755
index 0000000..c529b79
--- /dev/null
+++ b/source/c/compile_hdf5_gcc.sh
@@ -0,0 +1,27 @@
+#! /bin/bash
+
+source version.sh
+PLATFORM="$1"
+PATCHES="$2"
+
+if [ "$PLATFORM" != "i386" -a "$PLATFORM" != "x86" -a "$PLATFORM" != "amd64" -a "$PLATFORM" != "x86_64" -a "$PLATFORM" != "armv6l" ]; then
+  echo "Syntax: compile_hdf5_gcc.sh <platform> [<patches>]"
+  echo "where <platform> is one of i386, x86, amd64, x86_64, or armv6l"
+  exit 1
+fi
+
+tar xvf hdf5-$VERSION.tar
+
+cd hdf5-$VERSION
+
+if [ -n "$PATCHES" ]; then
+  for p in $PATCHES; do
+    patch -p0 < ../$p
+  done
+fi
+
+CFLAGS=$CFLAGS ./configure --prefix=/opt/hdf5-$VERSION-$PLATFORM --enable-debug=none --enable-production $ADDITIONAL &> configure.log
+
+make &> make.log
+
+make test &> test.log
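
Note: the compile scripts in this commit source version.sh, which the diffstat
above lists as a single line. Its content does not appear in this hunk, but it
presumably just sets the HDF5 version from which the tarball name and install
prefix are derived, along the lines of (an assumed value, matching the
hdf5-1.8.14 paths in the Windows makefiles further down):

    VERSION=1.8.14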
diff --git a/source/c/compile_hdf5_linux_amd64.sh b/source/c/compile_hdf5_linux_amd64.sh
new file mode 100755
index 0000000..eb3b92d
--- /dev/null
+++ b/source/c/compile_hdf5_linux_amd64.sh
@@ -0,0 +1,3 @@
+#! /bin/bash
+
+CFLAGS='-fPIC -m64' ./compile_hdf5_gcc.sh amd64 "gcc-4.678-optimizations-config.patch"
diff --git a/source/c/compile_hdf5_linux_arm.sh b/source/c/compile_hdf5_linux_arm.sh
new file mode 100755
index 0000000..a812edd
--- /dev/null
+++ b/source/c/compile_hdf5_linux_arm.sh
@@ -0,0 +1,3 @@
+#! /bin/bash
+
+./compile_hdf5_gcc.sh armv6l
diff --git a/source/c/compile_hdf5_linux_i386.sh b/source/c/compile_hdf5_linux_i386.sh
new file mode 100755
index 0000000..cf8dfff
--- /dev/null
+++ b/source/c/compile_hdf5_linux_i386.sh
@@ -0,0 +1,3 @@
+#! /bin/bash
+
+CFLAGS='-m32' ./compile_hdf5_gcc.sh i386 "gcc-4.678-optimizations-config.patch"
diff --git a/source/c/compile_hdf5_macosx_i386.sh b/source/c/compile_hdf5_macosx_i386.sh
new file mode 100755
index 0000000..4018056
--- /dev/null
+++ b/source/c/compile_hdf5_macosx_i386.sh
@@ -0,0 +1,3 @@
+#! /bin/bash
+
+CFLAGS='-m32 -mmacosx-version-min=10.6' ./compile_hdf5_gcc.sh i386
diff --git a/source/c/compile_hdf5_macosx_x86_64.sh b/source/c/compile_hdf5_macosx_x86_64.sh
new file mode 100755
index 0000000..1543508
--- /dev/null
+++ b/source/c/compile_hdf5_macosx_x86_64.sh
@@ -0,0 +1,3 @@
+#! /bin/bash
+
+CFLAGS='-m64 -mmacosx-version-min=10.6' ./compile_hdf5_gcc.sh x86_64
diff --git a/source/c/compile_hdf5_sunstudio_64.sh b/source/c/compile_hdf5_sunstudio_64.sh
new file mode 100755
index 0000000..8e9c646
--- /dev/null
+++ b/source/c/compile_hdf5_sunstudio_64.sh
@@ -0,0 +1,16 @@
+#! /bin/bash
+
+PATH=/opt/SUNWspro/bin:/usr/local/bin:/opt/csw/bin:/usr/sbin:/usr/bin:/usr/openwin/bin:/usr/ccs/bin:/usr/ucb
+export PATH
+
+source version.sh
+
+tar xf hdf5-$VERSION.tar
+
+cd hdf5-$VERSION
+
+CFLAGS='-fast -m64 -KPIC' ./configure --prefix=/opt/hdf5-$VERSION-64 --enable-shared --enable-debug=none --enable-production
+
+make > make.log 2>&1
+
+make test > test.log 2>&1
diff --git a/source/c/compile_hdf5_sunstudio_sparc32.sh b/source/c/compile_hdf5_sunstudio_sparc32.sh
new file mode 100755
index 0000000..b3c32ce
--- /dev/null
+++ b/source/c/compile_hdf5_sunstudio_sparc32.sh
@@ -0,0 +1,16 @@
+#! /bin/bash
+
+PATH=/opt/SUNWspro/bin:/usr/local/bin:/opt/csw/bin:/usr/sbin:/usr/bin:/usr/openwin/bin:/usr/ccs/bin:/usr/ucb
+export PATH
+
+source version.sh
+
+tar xf hdf5-$VERSION.tar
+
+cd hdf5-$VERSION
+
+CPPFLAGS='-D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE' ./configure --prefix=/opt/hdf5-$VERSION-32 --enable-shared --enable-debug=none --enable-production
+
+make > make.log 2>&1
+
+make test > test.log 2>&1
diff --git a/source/c/compile_hdf5_sunstudio_x86.sh b/source/c/compile_hdf5_sunstudio_x86.sh
new file mode 100755
index 0000000..d0f2d15
--- /dev/null
+++ b/source/c/compile_hdf5_sunstudio_x86.sh
@@ -0,0 +1,16 @@
+#! /bin/bash
+
+PATH=/opt/SUNWspro/bin:/usr/local/bin:/opt/csw/bin:/usr/sbin:/usr/bin:/usr/openwin/bin:/usr/ccs/bin:/usr/ucb
+export PATH
+
+source version.sh
+
+tar xf hdf5-$VERSION.tar
+
+cd hdf5-$VERSION
+
+CPPFLAGS='-D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE' CFLAGS='-KPIC' ./configure --prefix=/opt/hdf5-$VERSION-32 --enable-shared --enable-debug=none --enable-production
+
+make > make.log 2>&1
+
+make test > test.log 2>&1
diff --git a/source/c/compile_linux_amd64.sh b/source/c/compile_linux_amd64.sh
new file mode 100755
index 0000000..53295ef
--- /dev/null
+++ b/source/c/compile_linux_amd64.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+gcc -shared -O3 -floop-interchange -floop-strip-mine -floop-block -fgraphite-identity -mtune=corei7 -fPIC -Wl,--exclude-libs,ALL jhdf5/*.c hdf-java/*.c -I/opt/hdf5-${VERSION}-amd64/include -I/usr/java/jdk1.6.0/include -I/usr/java/jdk1.6.0/include/linux /opt/hdf5-${VERSION}-amd64/lib/libhdf5.a -o libjhdf5.so -lz
diff --git a/source/c/compile_linux_arm.sh b/source/c/compile_linux_arm.sh
new file mode 100755
index 0000000..bbbb611
--- /dev/null
+++ b/source/c/compile_linux_arm.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+gcc -shared -O3 -Wl,--exclude-libs,ALL jhdf5/*.c hdf-java/*.c -I/opt/hdf5-${VERSION}-armv6l/include -I/usr/java/jdk1.7.0/include -I/usr/java/jdk1.7.0/include/linux /opt/hdf5-${VERSION}-armv6l/lib/libhdf5.a -o libjhdf5.so -lz
diff --git a/source/c/compile_linux_i386.sh b/source/c/compile_linux_i386.sh
new file mode 100755
index 0000000..da706a5
--- /dev/null
+++ b/source/c/compile_linux_i386.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+gcc -m32 -shared -O3 -floop-interchange -floop-strip-mine -floop-block -fgraphite-identity -mtune=corei7 -Wl,--exclude-libs,ALL jhdf5/*.c hdf-java/*.c -I/opt/hdf5-${VERSION}-i386/include -I/usr/java/jdk1.6.0/include -I/usr/java/jdk1.6.0/include/linux /opt/hdf5-${VERSION}-i386/lib/libhdf5.a -o libjhdf5.so -lz
diff --git a/source/c/compile_macosx_i386.sh b/source/c/compile_macosx_i386.sh
new file mode 100755
index 0000000..6e8f031
--- /dev/null
+++ b/source/c/compile_macosx_i386.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+gcc -m32 -mmacosx-version-min=10.6 -bundle -O3 jhdf5/*.c hdf-java/*.c -I/System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers -I/opt/hdf5-${VERSION}-i386/include /opt/hdf5-${VERSION}-i386/lib/libhdf5.a -lz -o libjhdf5.jnilib
diff --git a/source/c/compile_macosx_x86_64.sh b/source/c/compile_macosx_x86_64.sh
new file mode 100755
index 0000000..ad4092a
--- /dev/null
+++ b/source/c/compile_macosx_x86_64.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+gcc -m64 -mmacosx-version-min=10.6 -dynamiclib -O3 jhdf5/*.c hdf-java/*.c -I/System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers -I/opt/hdf5-${VERSION}-x86_64/include /opt/hdf5-${VERSION}-x86_64/lib/libhdf5.a -lz -o libjhdf5.jnilib
diff --git a/source/c/compile_solaris_64.sh b/source/c/compile_solaris_64.sh
new file mode 100755
index 0000000..b12e1b0
--- /dev/null
+++ b/source/c/compile_solaris_64.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+cc -G -KPIC -fast -m64 jhdf5/*.c hdf-java/*.c -I/opt/hdf5-${VERSION}-64/include -I/usr/java/include -I/usr/java/include/solaris /opt/hdf5-${VERSION}-64/lib/libhdf5.a -lz -o libjhdf5.so
diff --git a/source/c/compile_solaris_sparc32.sh b/source/c/compile_solaris_sparc32.sh
new file mode 100755
index 0000000..191b5cd
--- /dev/null
+++ b/source/c/compile_solaris_sparc32.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+cc -G -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE jhdf5/*.c hdf-java/*.c -I/opt/hdf5-${VERSION}-32/include -I/usr/java/include -I/usr/java/include/solaris /opt/hdf5-${VERSION}-32/lib/libhdf5.a -lz -o libjhdf5.so
diff --git a/source/c/compile_solaris_x86.sh b/source/c/compile_solaris_x86.sh
new file mode 100644
index 0000000..836bacb
--- /dev/null
+++ b/source/c/compile_solaris_x86.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+source version.sh
+
+cc -G -KPIC -fast -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE jhdf5/*.c hdf-java/*.c -I/opt/hdf5-${VERSION}-32/include -I/usr/java/include -I/usr/java/include/solaris /opt/hdf5-${VERSION}-32/lib/libhdf5.a -lz -o libjhdf5.so
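
Note: the per-platform link scripts above all follow the same one-shot pattern:
compile the JNI glue from jhdf5/ and hdf-java/ in a single compiler invocation,
link the static libhdf5.a into the result so the shared library carries no
runtime dependency on HDF5, and add -lz for the deflate filter. Stripped of
platform-specific flags, the shape is (a sketch; the paths are illustrative
placeholders, not taken from any one script):

    cc -shared jhdf5/*.c hdf-java/*.c \
       -I"$HDF5_PREFIX/include" -I"$JDK_HOME/include" -I"$JDK_HOME/include/$OS" \
       "$HDF5_PREFIX/lib/libhdf5.a" -lz -o libjhdf5.so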
diff --git a/source/c/compile_windows_i386.mak b/source/c/compile_windows_i386.mak
new file mode 100644
index 0000000..fb631e4
--- /dev/null
+++ b/source/c/compile_windows_i386.mak
@@ -0,0 +1,469 @@
+#============================================================================
+#
+#              Makefile to compile HDF Java Native C Source
+#              Usage: nmake /f compile_windows_i386.mak
+#
+#============================================================================
+
+# Visual C++ directory, for example
+VCPPDIR=C:\Program Files\Microsoft Visual Studio 9.0\VC
+
+# Directory where JDK is installed (We require JDK 1.6)
+JAVADIR=C:\Program Files\Java\jdk1.6.0_37
+
+# Common parent directory
+HDFPARENTDIR=C:\JHDF5
+
+# Directory of the HDF Java Products, for example
+HDFJAVADIR=$(HDFPARENTDIR)\jhdf5_src\
+
+# The directory where HDF5 has been compiled
+HDFDIR=$(HDFPARENTDIR)\hdf5\hdf5-1.8.14
+
+# The directory where HDF5 has been built
+HDFBUILDDIR=$(HDFDIR)\build
+
+# The directory where the compiled HDF library is located
+HDFLIBDIR=$(HDFBUILDDIR)\bin\Release
+
+# The directory where HDF header files are located
+HDFINCDIR=$(HDFDIR)\src
+
+# the JPEG library, for example
+#JPEGLIB=E:\Work\MyHDFstuff\lib-external\jpeg\libjpeg.lib
+JPEGLIB=
+
+# the GZIP library, for example
+#GZIPLIB=E:\Work\MyHDFstuff\lib-external\zlib\bin\windows\zlib114\lib\zlib.lib
+GZIPLIB=$(HDFLIBDIR)\zlibstatic.lib
+
+# SZIP library, for example
+#SZIPLIB=E:\Work\MyHDFstuff\lib-external\szip\bin\windows\szip-msvc++\lib\szlib.lib
+SZIPLIB=
+
+
+#===========================================================================
+#   Do not make any changes below this line unless you know what you are doing
+#===========================================================================
+PATH=$(PATH);$(VCPPDIR)\BIN
+SRCDIR1=$(HDFJAVADIR)\jhdf5
+SRCDIR2=$(HDFJAVADIR)\hdf-java
+
+VALID_PATH_SET=YES
+#-------------------------------------------------------
+# Test if all paths are valid
+
+!IF EXISTS("$(VCPPDIR)")
+!ELSE
+!MESSAGE ERROR: Visual C++ directory $(VCPPDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(JAVADIR)")
+!ELSE
+!MESSAGE ERROR: JDK directory $(JAVADIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(SRCDIR1)")
+!ELSE
+!MESSAGE ERROR: C source directory $(SRCDIR1) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(SRCDIR2)")
+!ELSE
+!MESSAGE ERROR: C source directory $(SRCDIR2) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(HDFBUILDDIR)")
+!ELSE
+!MESSAGE ERROR: HDF build directory $(HDFBUILDDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(HDFLIBDIR)")
+!ELSE
+!MESSAGE ERROR: HDF library directory $(HDFLIBDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(HDFINCDIR)")
+!ELSE
+!MESSAGE ERROR: HDF header directory $(HDFINCDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+#!IF EXISTS("$(JPEGLIB)")
+#!ELSE
+#!MESSAGE ERROR: JPEG library does not exist
+#VALID_PATH_SET=NO 
+#!ENDIF
+
+!IF EXISTS("$(GZIPLIB)")
+!ELSE
+!MESSAGE ERROR: GZIP library does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+#!IF EXISTS("$(SZIPLIB)")
+#!ELSE
+#!MESSAGE ERROR: SZIP library does not exist
+#VALID_PATH_SET=NO 
+#!ENDIF
+
+#-------------------------------------------------------
+
+
+!IF "$(VALID_PATH_SET)" == "YES"
+
+!IF "$(OS)" == "Windows_NT"
+NULL=
+!ELSE 
+NULL=nul
+!ENDIF 
+
+INTDIR=.\jhdf5\Release
+OUTDIR=$(HDFJAVADIR)\lib\win
+
+ALL : "$(OUTDIR)\jhdf5.dll"
+
+"$(INTDIR)" :
+    if not exist "$(INTDIR)/$(NULL)" mkdir "$(INTDIR)"
+
+"$(OUTDIR)" :
+    if not exist "$(OUTDIR)/$(NULL)" mkdir "$(OUTDIR)"
+
+CPP=cl.exe
+CPP_PROJ=/nologo /W3 /EHsc /O2 /I "$(HDFINCDIR)" /I "$(HDFBUILDDIR)" /I "$(JAVADIR)\include" /I "$(JAVADIR)\include\win32" /D "WIN32" /D "NDEBUG" /D "_WINDOWS" /Fp"$(INTDIR)\jhdf5.pch" /Fo"$(INTDIR)\\" /Fd"$(INTDIR)\\" /FD /c 
+
+.c{$(INTDIR)}.obj::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cpp{$(INTDIR)}.obj::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cxx{$(INTDIR)}.obj::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.c{$(INTDIR)}.sbr::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cpp{$(INTDIR)}.sbr::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cxx{$(INTDIR)}.sbr::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+MTL=midl.exe
+MTL_PROJ=/nologo /D "NDEBUG" /mktyplib203 /win32 
+RSC=rc.exe
+BSC32=bscmake.exe
+BSC32_FLAGS=/nologo /o"$(INTDIR)\jhdf5.bsc" 
+BSC32_SBRS= \
+	
+LINK32=link.exe
+LINK32_FLAGS=$(HDFLIBDIR)\libhdf5.lib $(SZIPLIB) $(GZIPLIB) /nologo /dll /nodefaultlib:msvcrt /incremental:no /pdb:"$(INTDIR)\jhdf5.pdb" /machine:I386 /out:"$(OUTDIR)\jhdf5.dll" /implib:"$(INTDIR)\jhdf5.lib" 
+LINK32_OBJS= \
+	"$(INTDIR)\exceptionImpJHDF5.obj" \
+	"$(INTDIR)\h5aImpJHDF5.obj" \
+	"$(INTDIR)\h5ConstantsJHDF5.obj" \
+	"$(INTDIR)\h5dImpJHDF5.obj" \
+	"$(INTDIR)\h5fImpJHDF5.obj" \
+	"$(INTDIR)\h5gImpJHDF5.obj" \
+	"$(INTDIR)\h5iImpJHDF5.obj" \
+	"$(INTDIR)\h5ImpJHDF5.obj" \
+	"$(INTDIR)\h5lImpJHDF5.obj" \
+	"$(INTDIR)\h5oImpJHDF5.obj" \
+	"$(INTDIR)\h5pImpJHDF5.obj" \
+	"$(INTDIR)\h5rImpJHDF5.obj" \
+	"$(INTDIR)\h5sImpJHDF5.obj" \
+	"$(INTDIR)\h5tImpJHDF5.obj" \
+	"$(INTDIR)\h5utilJHDF5.obj" \
+	"$(INTDIR)\h5zImpJHDF5.obj" \
+	"$(INTDIR)\strcpyJHDF5.obj" \
+	"$(INTDIR)\h5aImp.obj" \
+	"$(INTDIR)\h5Constants.obj" \
+	"$(INTDIR)\h5dImp.obj" \
+	"$(INTDIR)\h5eImp.obj" \
+	"$(INTDIR)\h5fImp.obj" \
+	"$(INTDIR)\h5gImp.obj" \
+	"$(INTDIR)\h5iImp.obj" \
+	"$(INTDIR)\h5Imp.obj" \
+	"$(INTDIR)\h5lImp.obj" \
+	"$(INTDIR)\h5oImp.obj" \
+	"$(INTDIR)\h5pImp.obj" \
+	"$(INTDIR)\h5rImp.obj" \
+	"$(INTDIR)\h5sImp.obj" \
+	"$(INTDIR)\h5tImp.obj" \
+	"$(INTDIR)\h5util.obj" \
+	"$(INTDIR)\h5zImp.obj" \
+	"$(INTDIR)\nativeData.obj"
+
+"$(OUTDIR)\jhdf5.dll" : "$(OUTDIR)" $(DEF_FILE) $(LINK32_OBJS)
+    $(LINK32) @<<
+  $(LINK32_FLAGS) $(LINK32_OBJS)
+<<
+
+
+SOURCE=$(SRCDIR1)\exceptionImpJHDF5.c
+
+"$(INTDIR)\exceptionImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5aImpJHDF5.c
+
+"$(INTDIR)\h5aImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5ConstantsJHDF5.c
+
+"$(INTDIR)\h5ConstantsJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5dImpJHDF5.c
+
+"$(INTDIR)\h5dImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5fImpJHDF5.c
+
+"$(INTDIR)\h5fImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5gImpJHDF5.c
+
+"$(INTDIR)\h5gImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5iImpJHDF5.c
+
+"$(INTDIR)\h5iImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5ImpJHDF5.c
+
+"$(INTDIR)\h5ImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5lImpJHDF5.c
+
+"$(INTDIR)\h5lImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5oImpJHDF5.c
+
+"$(INTDIR)\h5oImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5pImpJHDF5.c
+
+"$(INTDIR)\h5pImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5rImpJHDF5.c
+
+"$(INTDIR)\h5rImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5sImpJHDF5.c
+
+"$(INTDIR)\h5sImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5tImpJHDF5.c
+
+"$(INTDIR)\h5tImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5utilJHDF5.c
+
+"$(INTDIR)\h5utilJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5zImpJHDF5.c
+
+"$(INTDIR)\h5zImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\strcpyJHDF5.c
+
+"$(INTDIR)\strcpyJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+
+
+SOURCE=$(SRCDIR2)\h5aImp.c
+
+"$(INTDIR)\h5aImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5Constants.c
+
+"$(INTDIR)\h5Constants.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5dImp.c
+
+"$(INTDIR)\h5dImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5eImp.c
+
+"$(INTDIR)\h5eImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5fImp.c
+
+"$(INTDIR)\h5fImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5gImp.c
+
+"$(INTDIR)\h5gImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5iImp.c
+
+"$(INTDIR)\h5iImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5Imp.c
+
+"$(INTDIR)\h5Imp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5lImp.c
+
+"$(INTDIR)\h5lImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5oImp.c
+
+"$(INTDIR)\h5oImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5pImp.c
+
+"$(INTDIR)\h5pImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5rImp.c
+
+"$(INTDIR)\h5rImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5sImp.c
+
+"$(INTDIR)\h5sImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5tImp.c
+
+"$(INTDIR)\h5tImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5util.c
+
+"$(INTDIR)\h5util.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5zImp.c
+
+"$(INTDIR)\h5zImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\nativeData.c
+
+"$(INTDIR)\nativeData.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+
+CLEAN :
+	-@erase "$(INTDIR)\exceptionImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5aImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5ConstantsJHDF5.obj"
+	-@erase "$(INTDIR)\h5dImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5fImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5gImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5iImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5lImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5ImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5pImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5rImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5sImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5tImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5oImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5zImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5utilJHDF5.obj"
+	-@erase "$(INTDIR)\strcpyJHDF5.obj"
+	-@erase "$(INTDIR)\h5aImp.obj"
+	-@erase "$(INTDIR)\h5Constants.obj"
+	-@erase "$(INTDIR)\h5dImp.obj"
+	-@erase "$(INTDIR)\h5eImp.obj"
+	-@erase "$(INTDIR)\h5fImp.obj"
+	-@erase "$(INTDIR)\h5gImp.obj"
+	-@erase "$(INTDIR)\h5iImp.obj"
+	-@erase "$(INTDIR)\h5lImp.obj"
+	-@erase "$(INTDIR)\h5Imp.obj"
+	-@erase "$(INTDIR)\h5pImp.obj"
+	-@erase "$(INTDIR)\h5rImp.obj"
+	-@erase "$(INTDIR)\h5sImp.obj"
+	-@erase "$(INTDIR)\h5tImp.obj"
+	-@erase "$(INTDIR)\h5oImp.obj"
+	-@erase "$(INTDIR)\h5zImp.obj"
+	-@erase "$(INTDIR)\h5util.obj"
+	-@erase "$(INTDIR)\nativeData.obj"
+	-@erase "$(INTDIR)\vc90.idb"
+	-@erase "$(INTDIR)\jhdf5.exp"
+	-@erase "$(INTDIR)\jhdf5.lib"
+	-@erase "$(OUTDIR)\jhdf5.dll"
+
+!ENDIF
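
Note: both Windows makefiles use nmake batch-mode inference rules. A
double-colon rule such as ".c{$(INTDIR)}.obj::" hands all out-of-date .c files
to a single cl.exe invocation (in batch rules, $< expands to every dependent),
and the "@<<" ... "<<" pair feeds the compiler options through an inline
response file to stay within the command-line length limit. A minimal
standalone illustration of the construct (hypothetical directory name):

    .c{obj}.obj::
    	cl.exe @<<
    	/nologo /c /Fo"obj\\" $<
    <<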
diff --git a/source/c/compile_windows_x64.mak b/source/c/compile_windows_x64.mak
new file mode 100644
index 0000000..a7f6f20
--- /dev/null
+++ b/source/c/compile_windows_x64.mak
@@ -0,0 +1,469 @@
+#============================================================================
+#
+#              Makefile to compile HDF Java Native C Source
+#              Usage: nmake /f compile_windows_x64.mak
+#
+#============================================================================
+
+# Visual C++ directory, for example
+VCPPDIR=C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC
+
+# Directory where JDK is installed (We require JDK 1.6)
+JAVADIR=C:\Program Files\Java\jdk1.6.0_37
+
+# Common parent directory
+HDFPARENTDIR=C:\JHDF5
+
+# Directory of the HDF Java Products, for example
+HDFJAVADIR=$(HDFPARENTDIR)\jhdf5_src\
+
+# The directory where HDF5 has been compiled
+HDFDIR=$(HDFPARENTDIR)\hdf5\hdf5-1.8.14
+
+# The directory where HDF5 has been built
+HDFBUILDDIR=$(HDFDIR)\build
+
+# The directory where the compiled HDF library is located
+HDFLIBDIR=$(HDFBUILDDIR)\bin\Release
+
+# The directory where HDF header files are located
+HDFINCDIR=$(HDFDIR)\src
+
+# the JPEG library, for example
+#JPEGLIB=E:\Work\MyHDFstuff\lib-external\jpeg\libjpeg.lib
+JPEGLIB=
+
+# the GZIP library, for example
+#GZIPLIB=E:\Work\MyHDFstuff\lib-external\zlib\bin\windows\zlib114\lib\zlib.lib
+GZIPLIB=$(HDFLIBDIR)\zlibstatic.lib
+
+# SZIP library, for example
+#SZIPLIB=E:\Work\MyHDFstuff\lib-external\szip\bin\windows\szip-msvc++\lib\szlib.lib
+SZIPLIB=
+
+
+#===========================================================================
+#   Do not make any changes below this line unless you know what you are doing
+#===========================================================================
+PATH=$(PATH);$(VCPPDIR)\BIN
+SRCDIR1=$(HDFJAVADIR)\jhdf5
+SRCDIR2=$(HDFJAVADIR)\hdf-java
+
+VALID_PATH_SET=YES
+#-------------------------------------------------------
+# Test if all paths are valid
+
+!IF EXISTS("$(VCPPDIR)")
+!ELSE
+!MESSAGE ERROR: Visual C++ directory $(VCPPDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(JAVADIR)")
+!ELSE
+!MESSAGE ERROR: JDK directory $(JAVADIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(SRCDIR1)")
+!ELSE
+!MESSAGE ERROR: C source directory $(SRCDIR1) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(SRCDIR2)")
+!ELSE
+!MESSAGE ERROR: C source directory $(SRCDIR2) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(HDFBUILDDIR)")
+!ELSE
+!MESSAGE ERROR: HDF build directory $(HDFBUILDDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(HDFLIBDIR)")
+!ELSE
+!MESSAGE ERROR: HDF library directory $(HDFLIBDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+!IF EXISTS("$(HDFINCDIR)")
+!ELSE
+!MESSAGE ERROR: HDF header directory $(HDFINCDIR) does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+#!IF EXISTS("$(JPEGLIB)")
+#!ELSE
+#!MESSAGE ERROR: JPEG library does not exist
+#VALID_PATH_SET=NO 
+#!ENDIF
+
+!IF EXISTS("$(GZIPLIB)")
+!ELSE
+!MESSAGE ERROR: GZIP library does not exist
+VALID_PATH_SET=NO 
+!ENDIF
+
+#!IF EXISTS("$(SZIPLIB)")
+#!ELSE
+#!MESSAGE ERROR: SZIP library does not exist
+#VALID_PATH_SET=NO 
+#!ENDIF
+
+#-------------------------------------------------------
+
+
+!IF "$(VALID_PATH_SET)" == "YES"
+
+!IF "$(OS)" == "Windows_NT"
+NULL=
+!ELSE 
+NULL=nul
+!ENDIF 
+
+INTDIR=.\jhdf5\Release
+OUTDIR=$(HDFJAVADIR)\lib\win
+
+ALL : "$(OUTDIR)\jhdf5.dll"
+
+"$(INTDIR)" :
+    if not exist "$(INTDIR)/$(NULL)" mkdir "$(INTDIR)"
+
+"$(OUTDIR)" :
+    if not exist "$(OUTDIR)/$(NULL)" mkdir "$(OUTDIR)"
+
+CPP=cl.exe
+CPP_PROJ=/nologo /W3 /EHsc /O2 /I "$(HDFINCDIR)" /I "$(HDFBUILDDIR)" /I "$(JAVADIR)\include" /I "$(JAVADIR)\include\win32" /D "WIN32" /D "NDEBUG" /D "_WINDOWS" /Fp"$(INTDIR)\jhdf5.pch" /Fo"$(INTDIR)\\" /Fd"$(INTDIR)\\" /FD /c 
+
+.c{$(INTDIR)}.obj::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cpp{$(INTDIR)}.obj::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cxx{$(INTDIR)}.obj::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.c{$(INTDIR)}.sbr::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cpp{$(INTDIR)}.sbr::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+.cxx{$(INTDIR)}.sbr::
+   $(CPP) @<<
+   $(CPP_PROJ) $< 
+<<
+
+MTL=midl.exe
+MTL_PROJ=/nologo /D "NDEBUG" /mktyplib203 /win32 
+RSC=rc.exe
+BSC32=bscmake.exe
+BSC32_FLAGS=/nologo /o"$(INTDIR)\jhdf5.bsc" 
+BSC32_SBRS= \
+	
+LINK64=link.exe
+LINK64_FLAGS=$(HDFLIBDIR)\libhdf5.lib $(SZIPLIB) $(GZIPLIB) /nologo /dll /nodefaultlib:msvcrt /incremental:no /pdb:"$(INTDIR)\jhdf5.pdb" /machine:x64 /out:"$(OUTDIR)\jhdf5.dll" /implib:"$(INTDIR)\jhdf5.lib" 
+LINK64_OBJS= \
+	"$(INTDIR)\exceptionImpJHDF5.obj" \
+	"$(INTDIR)\h5aImpJHDF5.obj" \
+	"$(INTDIR)\h5ConstantsJHDF5.obj" \
+	"$(INTDIR)\h5dImpJHDF5.obj" \
+	"$(INTDIR)\h5fImpJHDF5.obj" \
+	"$(INTDIR)\h5gImpJHDF5.obj" \
+	"$(INTDIR)\h5iImpJHDF5.obj" \
+	"$(INTDIR)\h5ImpJHDF5.obj" \
+	"$(INTDIR)\h5lImpJHDF5.obj" \
+	"$(INTDIR)\h5oImpJHDF5.obj" \
+	"$(INTDIR)\h5pImpJHDF5.obj" \
+	"$(INTDIR)\h5rImpJHDF5.obj" \
+	"$(INTDIR)\h5sImpJHDF5.obj" \
+	"$(INTDIR)\h5tImpJHDF5.obj" \
+	"$(INTDIR)\h5utilJHDF5.obj" \
+	"$(INTDIR)\h5zImpJHDF5.obj" \
+	"$(INTDIR)\strcpyJHDF5.obj" \
+	"$(INTDIR)\h5aImp.obj" \
+	"$(INTDIR)\h5Constants.obj" \
+	"$(INTDIR)\h5dImp.obj" \
+	"$(INTDIR)\h5eImp.obj" \
+	"$(INTDIR)\h5fImp.obj" \
+	"$(INTDIR)\h5gImp.obj" \
+	"$(INTDIR)\h5iImp.obj" \
+	"$(INTDIR)\h5Imp.obj" \
+	"$(INTDIR)\h5lImp.obj" \
+	"$(INTDIR)\h5oImp.obj" \
+	"$(INTDIR)\h5pImp.obj" \
+	"$(INTDIR)\h5rImp.obj" \
+	"$(INTDIR)\h5sImp.obj" \
+	"$(INTDIR)\h5tImp.obj" \
+	"$(INTDIR)\h5util.obj" \
+	"$(INTDIR)\h5zImp.obj" \
+	"$(INTDIR)\nativeData.obj"
+
+"$(OUTDIR)\jhdf5.dll" : "$(OUTDIR)" $(DEF_FILE) $(LINK64_OBJS)
+    $(LINK64) @<<
+  $(LINK64_FLAGS) $(LINK64_OBJS)
+<<
+
+
+SOURCE=$(SRCDIR1)\exceptionImpJHDF5.c
+
+"$(INTDIR)\exceptionImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5aImpJHDF5.c
+
+"$(INTDIR)\h5aImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5ConstantsJHDF5.c
+
+"$(INTDIR)\h5ConstantsJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5dImpJHDF5.c
+
+"$(INTDIR)\h5dImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5fImpJHDF5.c
+
+"$(INTDIR)\h5fImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5gImpJHDF5.c
+
+"$(INTDIR)\h5gImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5iImpJHDF5.c
+
+"$(INTDIR)\h5iImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5ImpJHDF5.c
+
+"$(INTDIR)\h5ImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5lImpJHDF5.c
+
+"$(INTDIR)\h5lImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5oImpJHDF5.c
+
+"$(INTDIR)\h5oImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5pImpJHDF5.c
+
+"$(INTDIR)\h5pImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5rImpJHDF5.c
+
+"$(INTDIR)\h5rImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5sImpJHDF5.c
+
+"$(INTDIR)\h5sImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5tImpJHDF5.c
+
+"$(INTDIR)\h5tImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5utilJHDF5.c
+
+"$(INTDIR)\h5utilJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\h5zImpJHDF5.c
+
+"$(INTDIR)\h5zImpJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR1)\strcpyJHDF5.c
+
+"$(INTDIR)\strcpyJHDF5.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+
+
+SOURCE=$(SRCDIR2)\h5aImp.c
+
+"$(INTDIR)\h5aImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5Constants.c
+
+"$(INTDIR)\h5Constants.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5dImp.c
+
+"$(INTDIR)\h5dImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5eImp.c
+
+"$(INTDIR)\h5eImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5fImp.c
+
+"$(INTDIR)\h5fImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5gImp.c
+
+"$(INTDIR)\h5gImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5iImp.c
+
+"$(INTDIR)\h5iImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5Imp.c
+
+"$(INTDIR)\h5Imp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5lImp.c
+
+"$(INTDIR)\h5lImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5oImp.c
+
+"$(INTDIR)\h5oImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5pImp.c
+
+"$(INTDIR)\h5pImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5rImp.c
+
+"$(INTDIR)\h5rImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5sImp.c
+
+"$(INTDIR)\h5sImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5tImp.c
+
+"$(INTDIR)\h5tImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5util.c
+
+"$(INTDIR)\h5util.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\h5zImp.c
+
+"$(INTDIR)\h5zImp.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+SOURCE=$(SRCDIR2)\nativeData.c
+
+"$(INTDIR)\nativeData.obj" : $(SOURCE) "$(INTDIR)"
+	$(CPP) $(CPP_PROJ) $(SOURCE)
+
+
+
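+# In each command below, '-' ignores a failing erase (the file may not exist)
+# and '@' suppresses echoing of the command itself.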
+CLEAN :
+	-@erase "$(INTDIR)\exceptionImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5aImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5ConstantsJHDF5.obj"
+	-@erase "$(INTDIR)\h5dImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5fImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5gImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5iImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5lImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5ImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5pImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5rImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5sImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5tImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5oImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5zImpJHDF5.obj"
+	-@erase "$(INTDIR)\h5utilJHDF5.obj"
+	-@erase "$(INTDIR)\strcpyJHDF5.obj"
+	-@erase "$(INTDIR)\h5aImp.obj"
+	-@erase "$(INTDIR)\h5Constants.obj"
+	-@erase "$(INTDIR)\h5dImp.obj"
+	-@erase "$(INTDIR)\h5eImp.obj"
+	-@erase "$(INTDIR)\h5fImp.obj"
+	-@erase "$(INTDIR)\h5gImp.obj"
+	-@erase "$(INTDIR)\h5iImp.obj"
+	-@erase "$(INTDIR)\h5lImp.obj"
+	-@erase "$(INTDIR)\h5Imp.obj"
+	-@erase "$(INTDIR)\h5pImp.obj"
+	-@erase "$(INTDIR)\h5rImp.obj"
+	-@erase "$(INTDIR)\h5sImp.obj"
+	-@erase "$(INTDIR)\h5tImp.obj"
+	-@erase "$(INTDIR)\h5oImp.obj"
+	-@erase "$(INTDIR)\h5zImp.obj"
+	-@erase "$(INTDIR)\h5util.obj"
+	-@erase "$(INTDIR)\nativeData.obj"
+	-@erase "$(INTDIR)\vc90.idb"
+	-@erase "$(INTDIR)\jhdf5.exp"
+	-@erase "$(INTDIR)\jhdf5.lib"
+	-@erase "$(OUTDIR)\jhdf5.dll"
+
+!ENDIF
diff --git a/source/c/create_win_zip.sh b/source/c/create_win_zip.sh
new file mode 100755
index 0000000..8fa6949
--- /dev/null
+++ b/source/c/create_win_zip.sh
@@ -0,0 +1,19 @@
+#! /bin/bash
+
+source version.sh
+
+rm -f hdf5-$VERSION-win.zip
+rm -fR hdf5-$VERSION
+tar xf hdf5-$VERSION.tar
+cd hdf5-$VERSION
+
+patch -s -p0 < ../hdf5_win_compile.diff
+find . -name "*.orig" -exec rm {} \;
+
+cp -f config/cmake/UserMacros/Windows_MT.cmake UserMacros.cmake
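+# fgres (assumed here to be a fixed-string search-and-replace helper; it is
+# not a standard tool) switches the static-CRT option to ON, and the grep on
+# the next line verifies that the replacement actually took effect.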
+fgres "option (BUILD_STATIC_CRT_LIBS \"Build With Static CRT Libraries\" OFF)" "option (BUILD_STATIC_CRT_LIBS \"Build With Static CRT Libraries\" ON)" UserMacros.cmake
+grep -q -F 'option (BUILD_STATIC_CRT_LIBS "Build With Static CRT Libraries" ON)' UserMacros.cmake || echo "Patching UserMacros.cmake for static build FAILED"
+cd ..
+
+zip -rq hdf5-$VERSION-win.zip hdf5-$VERSION
+rm -fR hdf5-$VERSION
diff --git a/source/c/gcc-4.678-optimizations-config.patch b/source/c/gcc-4.678-optimizations-config.patch
new file mode 100644
index 0000000..4dc1521
--- /dev/null
+++ b/source/c/gcc-4.678-optimizations-config.patch
@@ -0,0 +1,33 @@
+diff -ruN hdf5-1.8.13.orig/config/gnu-flags hdf5-1.8.13/config/gnu-flags
+--- config/gnu-flags	2014-05-06 04:13:21.000000000 +0200
++++ config/gnu-flags	2014-06-06 15:36:09.318183153 +0200
+@@ -260,6 +260,9 @@
+     # (Strictly speaking this isn't really a "warning" flag, so it's added to
+     #   the debugging flags)
+     #DEBUG_CFLAGS="$DEBUG_CFLAGS -Wstack-protector -fstack-protector-all"
++
++    # Enable some additional optimization settings
++    PROD_CFLAGS="$PROD_CFLAGS -floop-interchange -floop-strip-mine -floop-block -fgraphite-identity -fuse-linker-plugin -flto -mtune=corei7"
+     ;;
+ 
+   gcc-4.7*)
+@@ -331,6 +334,9 @@
+     # (Strictly speaking this isn't really a "warning" flag, so it's added to
+     #   the debugging flags)
+     #DEBUG_CFLAGS="$DEBUG_CFLAGS -Wstack-protector -fstack-protector-all"
++
++    # Enable some additional optimization settings
++    PROD_CFLAGS="$PROD_CFLAGS -floop-interchange -floop-strip-mine -floop-block -fgraphite-identity -fuse-linker-plugin -flto -mtune=corei7"
+     ;;
+ 
+   gcc-4.6*)
+@@ -395,6 +401,9 @@
+     # (Strictly speaking this isn't really a "warning" flag, so it's added to
+     #   the debugging flags)
+     #DEBUG_CFLAGS="$DEBUG_CFLAGS -Wstack-protector -fstack-protector-all"
++
++    # Enable some additional optimization settings
++    PROD_CFLAGS="$PROD_CFLAGS -floop-interchange -floop-strip-mine -floop-block -fgraphite-identity -fuse-linker-plugin -flto -mtune=corei7"
+     ;;
+ 
+   gcc-4.5*)
diff --git a/source/c/hdf-java/h5Constants.c b/source/c/hdf-java/h5Constants.c
new file mode 100755
index 0000000..30d431d
--- /dev/null
+++ b/source/c/hdf-java/h5Constants.c
@@ -0,0 +1,661 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help@hdfgroup.org.           *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include "hdf5.h"
+
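+/* JNI name decoration: "_1" inside a native method name encodes a literal '_'
+ * in the Java method name, so Java_..._H5F_1ACC_1RDONLY implements a Java
+ * method declared roughly as (a sketch, assumed modifiers):
+ *     public static native int H5F_ACC_RDONLY();
+ * on the class ncsa.hdf.hdf5lib.HDF5Constants. */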
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1QUARTER_1HADDR_1MAX(JNIEnv *env, jclass cls) { return (hsize_t)HADDR_MAX/4; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1ALLOW_1K13_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_ALLOW_K13_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1CHIP_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_CHIP_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1EC_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_EC_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1MAX_1PIXELS_1PER_1BLOCK(JNIEnv *env, jclass cls) { return H5_SZIP_MAX_PIXELS_PER_BLOCK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1NN_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_NN_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1UNKNOWN(JNIEnv *env, jclass cls) { return H5_INDEX_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1NAME(JNIEnv *env, jclass cls) { return H5_INDEX_NAME; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1CRT_1ORDER(JNIEnv *env, jclass cls) { return H5_INDEX_CRT_ORDER; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1N(JNIEnv *env, jclass cls) { return H5_INDEX_N; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1ITER_1UNKNOWN(JNIEnv *env, jclass cls) { return H5_ITER_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1ITER_1INC(JNIEnv *env, jclass cls) { return H5_ITER_INC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1ITER_1DEC(JNIEnv *env, jclass cls) { return H5_ITER_DEC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1ITER_1NATIVE(JNIEnv *env, jclass cls) { return H5_ITER_NATIVE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5_1ITER_1N(JNIEnv *env, jclass cls) { return H5_ITER_N; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5AC_1CURR_1CACHE_1CONFIG_1VERSION(JNIEnv *env, jclass cls) { return H5AC__CURR_CACHE_CONFIG_VERSION; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5AC_1MAX_1TRACE_1FILE_1NAME_1LEN(JNIEnv *env, jclass cls) { return H5AC__MAX_TRACE_FILE_NAME_LEN; }
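+/* Constants added during the 1.8.x series are guarded on H5_VERS_RELEASE (the
+ * 1.8.x patch level) and degrade to 0 when the HDF5 library is too old. */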
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5AC_1METADATA_1WRITE_1STRATEGY_1PROCESS_1ZERO_1ONLY(JNIEnv *env, jclass cls) { 
+#if (H5_VERS_RELEASE >= 6) 
+    return H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY; 
+#else
+    return 0;
+#endif
+}
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5AC_1METADATA_1WRITE_1STRATEGY_1DISTRIBUTED(JNIEnv *env, jclass cls) {
+#if (H5_VERS_RELEASE >= 6) 
+    return H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED;
+#else
+    return 0;
+#endif
+}
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1incr_1off(JNIEnv *env, jclass cls) { return H5C_incr__off; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1incr_1threshold(JNIEnv *env, jclass cls) { return H5C_incr__threshold; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1flash_1incr_1off(JNIEnv *env, jclass cls) { return H5C_flash_incr__off; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1flash_1incr_1add_1space(JNIEnv *env, jclass cls) { return H5C_flash_incr__add_space; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1decr_1off(JNIEnv *env, jclass cls) { return H5C_decr__off; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1decr_1threshold(JNIEnv *env, jclass cls) { return H5C_decr__threshold; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1decr_1age_1out(JNIEnv *env, jclass cls) { return H5C_decr__age_out; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5C_1decr_1age_1out_1with_1threshold(JNIEnv *env, jclass cls) { return H5C_decr__age_out_with_threshold; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1CHUNK_1BTREE(JNIEnv *env, jclass cls) { return H5D_CHUNK_BTREE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1DEFAULT(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1EARLY(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_EARLY; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1ERROR(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1INCR(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_INCR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1LATE(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_LATE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1ERROR(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1ALLOC(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_ALLOC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1NEVER(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_NEVER; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1IFSET(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_IFSET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1DEFAULT(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1ERROR(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1UNDEFINED(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_UNDEFINED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1USER_1DEFINED(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_USER_DEFINED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1LAYOUT_1ERROR(JNIEnv *env, jclass cls) { return H5D_LAYOUT_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1CHUNKED(JNIEnv *env, jclass cls) { return H5D_CHUNKED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1COMPACT(JNIEnv *env, jclass cls) { return H5D_COMPACT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1CONTIGUOUS(JNIEnv *env, jclass cls) { return H5D_CONTIGUOUS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1NLAYOUTS(JNIEnv *env, jclass cls) { return H5D_NLAYOUTS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1ALLOCATED(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_ALLOCATED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1ERROR(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1NOT_1ALLOCATED(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_NOT_ALLOCATED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1PART_1ALLOCATED(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_PART_ALLOCATED; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1ALIGNMENT(JNIEnv *env, jclass cls) { return H5E_ALIGNMENT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1ALREADYEXISTS(JNIEnv *env, jclass cls) { return H5E_ALREADYEXISTS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1ALREADYINIT(JNIEnv *env, jclass cls) { return H5E_ALREADYINIT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1ARGS(JNIEnv *env, jclass cls) { return H5E_ARGS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1ATOM(JNIEnv *env, jclass cls) { return H5E_ATOM; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1ATTR(JNIEnv *env, jclass cls) { return H5E_ATTR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADATOM(JNIEnv *env, jclass cls) { return H5E_BADATOM; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADFILE(JNIEnv *env, jclass cls) { return H5E_BADFILE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADGROUP(JNIEnv *env, jclass cls) { return H5E_BADGROUP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADMESG(JNIEnv *env, jclass cls) { return H5E_BADMESG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADRANGE(JNIEnv *env, jclass cls) { return H5E_BADRANGE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADSELECT(JNIEnv *env, jclass cls) { return H5E_BADSELECT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADSIZE(JNIEnv *env, jclass cls) { return H5E_BADSIZE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADTYPE(JNIEnv *env, jclass cls) { return H5E_BADTYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BADVALUE(JNIEnv *env, jclass cls) { return H5E_BADVALUE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1BTREE(JNIEnv *env, jclass cls) { return H5E_BTREE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CACHE(JNIEnv *env, jclass cls) { return H5E_CACHE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CALLBACK(JNIEnv *env, jclass cls) { return H5E_CALLBACK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANAPPLY(JNIEnv *env, jclass cls) { return H5E_CANAPPLY; }
+/*JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTALLOC(JNIEnv *env, jclass cls) { return H5E_CANTALLOC; }*/
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTCLIP(JNIEnv *env, jclass cls) { return H5E_CANTCLIP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTCLOSEFILE(JNIEnv *env, jclass cls) { return H5E_CANTCLOSEFILE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTCONVERT(JNIEnv *env, jclass cls) { return H5E_CANTCONVERT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTCOPY(JNIEnv *env, jclass cls) { return H5E_CANTCOPY; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTCOUNT(JNIEnv *env, jclass cls) { return H5E_CANTCOUNT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTCREATE(JNIEnv *env, jclass cls) { return H5E_CANTCREATE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTDEC(JNIEnv *env, jclass cls) { return H5E_CANTDEC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTDECODE(JNIEnv *env, jclass cls) { return H5E_CANTDECODE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTDELETE(JNIEnv *env, jclass cls) { return H5E_CANTDELETE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTENCODE(JNIEnv *env, jclass cls) { return H5E_CANTENCODE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTFLUSH(JNIEnv *env, jclass cls) { return H5E_CANTFLUSH; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTFREE(JNIEnv *env, jclass cls) { return H5E_CANTFREE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTGET(JNIEnv *env, jclass cls) { return H5E_CANTGET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTINC(JNIEnv *env, jclass cls) { return H5E_CANTINC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTINIT(JNIEnv *env, jclass cls) { return H5E_CANTINIT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTINSERT(JNIEnv *env, jclass cls) { return H5E_CANTINSERT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTLIST(JNIEnv *env, jclass cls) { return H5E_CANTLIST; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTLOAD(JNIEnv *env, jclass cls) { return H5E_CANTLOAD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTLOCK(JNIEnv *env, jclass cls) { return H5E_CANTLOCK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTNEXT(JNIEnv *env, jclass cls) { return H5E_CANTNEXT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTOPENFILE(JNIEnv *env, jclass cls) { return H5E_CANTOPENFILE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTOPENOBJ(JNIEnv *env, jclass cls) { return H5E_CANTOPENOBJ; }
+/*JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTRECV(JNIEnv *env, jclass cls) { return H5E_CANTRECV; }*/
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTREGISTER(JNIEnv *env, jclass cls) { return H5E_CANTREGISTER; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTRELEASE(JNIEnv *env, jclass cls) { return H5E_CANTRELEASE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTSELECT(JNIEnv *env, jclass cls) { return H5E_CANTSELECT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTSET(JNIEnv *env, jclass cls) { return H5E_CANTSET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTSPLIT(JNIEnv *env, jclass cls) { return H5E_CANTSPLIT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CANTUNLOCK(JNIEnv *env, jclass cls) { return H5E_CANTUNLOCK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1CLOSEERROR(JNIEnv *env, jclass cls) { return H5E_CLOSEERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1COMPLEN(JNIEnv *env, jclass cls) { return H5E_COMPLEN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1DATASET(JNIEnv *env, jclass cls) { return H5E_DATASET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1DATASPACE(JNIEnv *env, jclass cls) { return H5E_DATASPACE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1DATATYPE(JNIEnv *env, jclass cls) { return H5E_DATATYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1DEFAULT(JNIEnv *env, jclass cls) { return H5E_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1DUPCLASS(JNIEnv *env, jclass cls) { return H5E_DUPCLASS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1EFL(JNIEnv *env, jclass cls) { return H5E_EFL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1EXISTS(JNIEnv *env, jclass cls) { return H5E_EXISTS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1FCNTL(JNIEnv *env, jclass cls) { return H5E_FCNTL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1FILE(JNIEnv *env, jclass cls) { return H5E_FILE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1FILEEXISTS(JNIEnv *env, jclass cls) { return H5E_FILEEXISTS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1FILEOPEN(JNIEnv *env, jclass cls) { return H5E_FILEOPEN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1FUNC(JNIEnv *env, jclass cls) { return H5E_FUNC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1HEAP(JNIEnv *env, jclass cls) { return H5E_HEAP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1INTERNAL(JNIEnv *env, jclass cls) { return H5E_INTERNAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1IO(JNIEnv *env, jclass cls) { return H5E_IO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1LINK(JNIEnv *env, jclass cls) { return H5E_LINK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1LINKCOUNT(JNIEnv *env, jclass cls) { return H5E_LINKCOUNT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1MAJOR(JNIEnv *env, jclass cls) { return H5E_MAJOR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1MINOR(JNIEnv *env, jclass cls) { return H5E_MINOR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1MOUNT(JNIEnv *env, jclass cls) { return H5E_MOUNT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1MPI(JNIEnv *env, jclass cls) { return H5E_MPI; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1MPIERRSTR(JNIEnv *env, jclass cls) { return H5E_MPIERRSTR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NOFILTER(JNIEnv *env, jclass cls) { return H5E_NOFILTER; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NOIDS(JNIEnv *env, jclass cls) { return H5E_NOIDS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NONE_1MAJOR(JNIEnv *env, jclass cls) { return H5E_NONE_MAJOR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NONE_1MINOR(JNIEnv *env, jclass cls) { return H5E_NONE_MINOR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NOSPACE(JNIEnv *env, jclass cls) { return H5E_NOSPACE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NOTCACHED(JNIEnv *env, jclass cls) { return H5E_NOTCACHED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NOTFOUND(JNIEnv *env, jclass cls) { return H5E_NOTFOUND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1NOTHDF5(JNIEnv *env, jclass cls) { return H5E_NOTHDF5; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1OHDR(JNIEnv *env, jclass cls) { return H5E_OHDR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1OVERFLOW(JNIEnv *env, jclass cls) { return H5E_OVERFLOW; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1PLINE(JNIEnv *env, jclass cls) { return H5E_PLINE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1PLIST(JNIEnv *env, jclass cls) { return H5E_PLIST; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1PROTECT(JNIEnv *env, jclass cls) { return H5E_PROTECT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1READERROR(JNIEnv *env, jclass cls) { return H5E_READERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1REFERENCE(JNIEnv *env, jclass cls) { return H5E_REFERENCE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1RESOURCE(JNIEnv *env, jclass cls) { return H5E_RESOURCE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1RS(JNIEnv *env, jclass cls) { return H5E_RS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1SEEKERROR(JNIEnv *env, jclass cls) { return H5E_SEEKERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1SETLOCAL(JNIEnv *env, jclass cls) { return H5E_SETLOCAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1STORAGE(JNIEnv *env, jclass cls) { return H5E_STORAGE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1SYM(JNIEnv *env, jclass cls) { return H5E_SYM; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1TRUNCATED(JNIEnv *env, jclass cls) { return H5E_TRUNCATED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1TST(JNIEnv *env, jclass cls) { return H5E_TST; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1UNINITIALIZED(JNIEnv *env, jclass cls) { return H5E_UNINITIALIZED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1UNSUPPORTED(JNIEnv *env, jclass cls) { return H5E_UNSUPPORTED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1VERSION(JNIEnv *env, jclass cls) { return H5E_VERSION; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1VFL(JNIEnv *env, jclass cls) { return H5E_VFL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1WALK_1DOWNWARD(JNIEnv *env, jclass cls) { return H5E_WALK_DOWNWARD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1WALK_1UPWARD(JNIEnv *env, jclass cls) { return H5E_WALK_UPWARD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5E_1WRITEERROR(JNIEnv *env, jclass cls) { return H5E_WRITEERROR; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1CREAT(JNIEnv *env, jclass cls) { return H5F_ACC_CREAT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1DEBUG(JNIEnv *env, jclass cls) { return H5F_ACC_DEBUG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1EXCL(JNIEnv *env, jclass cls) { return H5F_ACC_EXCL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1RDONLY(JNIEnv *env, jclass cls) { return H5F_ACC_RDONLY; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1RDWR(JNIEnv *env, jclass cls) { return H5F_ACC_RDWR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1TRUNC(JNIEnv *env, jclass cls) { return H5F_ACC_TRUNC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1DEFAULT(JNIEnv *env, jclass cls) { return H5F_ACC_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1DEFAULT(JNIEnv *env, jclass cls) { return H5F_CLOSE_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1SEMI(JNIEnv *env, jclass cls) { return H5F_CLOSE_SEMI; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1STRONG(JNIEnv *env, jclass cls) { return H5F_CLOSE_STRONG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1WEAK(JNIEnv *env, jclass cls) { return H5F_CLOSE_WEAK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1EARLIEST(JNIEnv *env, jclass cls){return H5F_LIBVER_EARLIEST;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1LATEST(JNIEnv *env, jclass cls){return H5F_LIBVER_LATEST;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1ALL(JNIEnv *env, jclass cls) { return H5F_OBJ_ALL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1ATTR(JNIEnv *env, jclass cls) { return H5F_OBJ_ATTR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1DATASET(JNIEnv *env, jclass cls) { return H5F_OBJ_DATASET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1DATATYPE(JNIEnv *env, jclass cls) { return H5F_OBJ_DATATYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1FILE(JNIEnv *env, jclass cls) { return H5F_OBJ_FILE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1GROUP(JNIEnv *env, jclass cls) { return H5F_OBJ_GROUP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1LOCAL(JNIEnv *env, jclass cls) { return H5F_OBJ_LOCAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1SCOPE_1GLOBAL(JNIEnv *env, jclass cls) { return H5F_SCOPE_GLOBAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1SCOPE_1LOCAL(JNIEnv *env, jclass cls) { return H5F_SCOPE_LOCAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5F_1UNLIMITED(JNIEnv *env, jclass cls) { return (jint)H5F_UNLIMITED; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1CORE(JNIEnv *env, jclass cls) { return H5FD_CORE; }
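+/* Optional file drivers: when the corresponding feature was not compiled into
+ * the HDF5 library, the constant is reported to the Java side as -1. */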
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1DIRECT(JNIEnv *env, jclass cls) { 
+#ifdef H5_HAVE_DIRECT
+    return H5FD_DIRECT;
+#else
+    return -1;
+#endif
+}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1FAMILY(JNIEnv *env, jclass cls) { return H5FD_FAMILY; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG(JNIEnv *env, jclass cls) { return H5FD_LOG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MPIO(JNIEnv *env, jclass cls) { return H5FD_MPIO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MULTI(JNIEnv *env, jclass cls) { return H5FD_MULTI; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1SEC2(JNIEnv *env, jclass cls) { return H5FD_SEC2; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1STDIO(JNIEnv *env, jclass cls) { return H5FD_STDIO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1WINDOWS(JNIEnv *env, jclass cls) {
+#ifdef H5_HAVE_WINDOWS
+    return H5FD_WINDOWS;
+#else
+    return -1;
+#endif
+}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_READ; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_WRITE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1SEEK(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_SEEK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_IO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FILE_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_FILE_READ; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FILE_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_FILE_WRITE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FILE_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_FILE_IO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FLAVOR(JNIEnv *env, jclass cls) { return H5FD_LOG_FLAVOR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_READ; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_WRITE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1SEEK(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_SEEK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1TRUNCATE(JNIEnv *env, jclass cls) {
+#if (H5_VERS_RELEASE > 6) /* H5_VERSION_GE(1,8,7) */
+    return H5FD_LOG_NUM_TRUNCATE; 
+#else
+    return 0;
+#endif
+}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_IO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1OPEN(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_OPEN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1STAT(JNIEnv *env, jclass cls) {
+#if (H5_VERS_RELEASE > 6) /* H5_VERSION_GE(1,8,7) */
+    return H5FD_LOG_TIME_STAT; 
+#else
+    return 0;
+#endif
+}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_READ; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_WRITE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1SEEK(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_SEEK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1CLOSE(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_CLOSE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_IO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1ALLOC(JNIEnv *env, jclass cls) { return H5FD_LOG_ALLOC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1ALL(JNIEnv *env, jclass cls) { return H5FD_LOG_ALL; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1NOLIST(JNIEnv *env, jclass cls) { return H5FD_MEM_NOLIST; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT(JNIEnv *env, jclass cls) { return H5FD_MEM_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1SUPER(JNIEnv *env, jclass cls) { return H5FD_MEM_SUPER; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1BTREE(JNIEnv *env, jclass cls) { return H5FD_MEM_BTREE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DRAW(JNIEnv *env, jclass cls) { return H5FD_MEM_DRAW; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1GHEAP(JNIEnv *env, jclass cls) { return H5FD_MEM_GHEAP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1LHEAP(JNIEnv *env, jclass cls) { return H5FD_MEM_LHEAP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1OHDR(JNIEnv *env, jclass cls) { return H5FD_MEM_OHDR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1NTYPES(JNIEnv *env, jclass cls) { return H5FD_MEM_NTYPES; }
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5FD_1DEFAULT_1HADDR_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)HADDR_MAX/H5FD_MEM_NTYPES; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1DATASET(JNIEnv *env, jclass cls) { return H5G_DATASET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1GROUP(JNIEnv *env, jclass cls) { return H5G_GROUP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1LINK(JNIEnv *env, jclass cls) { return H5G_LINK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1UDLINK(JNIEnv *env, jclass cls) { return H5G_UDLINK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1LINK_1ERROR(JNIEnv *env, jclass cls) { return H5G_LINK_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1LINK_1HARD(JNIEnv *env, jclass cls) { return H5G_LINK_HARD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1LINK_1SOFT(JNIEnv *env, jclass cls) { return H5G_LINK_SOFT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1NLIBTYPES(JNIEnv *env, jclass cls) { return H5G_NLIBTYPES; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1NTYPES(JNIEnv *env, jclass cls) { return H5G_NTYPES; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1NUSERTYPES(JNIEnv *env, jclass cls) { return H5G_NUSERTYPES; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1RESERVED_15(JNIEnv *env, jclass cls) { return H5G_RESERVED_5; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1RESERVED_16(JNIEnv *env, jclass cls) { return H5G_RESERVED_6; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1RESERVED_17(JNIEnv *env, jclass cls) { return H5G_RESERVED_7; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1SAME_1LOC(JNIEnv *env, jclass cls) { return H5G_SAME_LOC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1UNKNOWN(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1SYMBOL_1TABLE(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_SYMBOL_TABLE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1COMPACT(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_COMPACT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1DENSE(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_DENSE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1TYPE(JNIEnv *env, jclass cls) { return H5G_TYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5G_1UNKNOWN(JNIEnv *env, jclass cls) { return H5G_UNKNOWN; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1ATTR(JNIEnv *env, jclass cls) { return H5I_ATTR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1BADID(JNIEnv *env, jclass cls) { return H5I_BADID; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1DATASET(JNIEnv *env, jclass cls) { return H5I_DATASET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1DATASPACE(JNIEnv *env, jclass cls) { return H5I_DATASPACE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1DATATYPE(JNIEnv *env, jclass cls) { return H5I_DATATYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1FILE(JNIEnv *env, jclass cls) { return H5I_FILE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1GENPROP_1CLS(JNIEnv *env, jclass cls) { return H5I_GENPROP_CLS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1GENPROP_1LST(JNIEnv *env, jclass cls) { return H5I_GENPROP_LST; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1GROUP(JNIEnv *env, jclass cls) { return H5I_GROUP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1INVALID_1HID(JNIEnv *env, jclass cls) { return H5I_INVALID_HID; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1REFERENCE(JNIEnv *env, jclass cls) { return H5I_REFERENCE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5I_1VFL(JNIEnv *env, jclass cls) { return H5I_VFL; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1ERROR(JNIEnv *env, jclass cls) { return H5L_TYPE_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1HARD(JNIEnv *env, jclass cls) { return H5L_TYPE_HARD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1SOFT(JNIEnv *env, jclass cls) { return H5L_TYPE_SOFT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1EXTERNAL(JNIEnv *env, jclass cls) { return H5L_TYPE_EXTERNAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1MAX(JNIEnv *env, jclass cls) { return H5L_TYPE_MAX; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1SHALLOW_1HIERARCHY_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_SHALLOW_HIERARCHY_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1EXPAND_1SOFT_1LINK_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_EXPAND_SOFT_LINK_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1EXPAND_1EXT_1LINK_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_EXPAND_EXT_LINK_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1EXPAND_1REFERENCE_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_EXPAND_REFERENCE_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1WITHOUT_1ATTR_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_WITHOUT_ATTR_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1PRESERVE_1NULL_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_PRESERVE_NULL_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1NONE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_NONE_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1SDSPACE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_SDSPACE_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1DTYPE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_DTYPE_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1FILL_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_FILL_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1PLINE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_PLINE_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1ATTR_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_ATTR_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1ALL_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_ALL_FLAG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1UNKNOWN(JNIEnv *env, jclass cls) { return H5O_TYPE_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1GROUP(JNIEnv *env, jclass cls) { return H5O_TYPE_GROUP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1DATASET(JNIEnv *env, jclass cls) { return H5O_TYPE_DATASET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1NAMED_1DATATYPE(JNIEnv *env, jclass cls) { return H5O_TYPE_NAMED_DATATYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1NTYPES(JNIEnv *env, jclass cls) { return H5O_TYPE_NTYPES; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1ROOT(JNIEnv *env, jclass cls){return H5P_ROOT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1OBJECT_1CREATE(JNIEnv *env, jclass cls){return H5P_OBJECT_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1CREATE(JNIEnv *env, jclass cls){return H5P_FILE_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1ACCESS(JNIEnv *env, jclass cls){return H5P_FILE_ACCESS;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1CREATE(JNIEnv *env, jclass cls){return H5P_DATASET_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1ACCESS(JNIEnv *env, jclass cls){return H5P_DATASET_ACCESS;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1XFER(JNIEnv *env, jclass cls){return H5P_DATASET_XFER;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1MOUNT(JNIEnv *env, jclass cls){return H5P_FILE_MOUNT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1CREATE(JNIEnv *env, jclass cls){return H5P_GROUP_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1ACCESS(JNIEnv *env, jclass cls){return H5P_GROUP_ACCESS;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1CREATE(JNIEnv *env, jclass cls){return H5P_DATATYPE_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1ACCESS(JNIEnv *env, jclass cls){return H5P_DATATYPE_ACCESS;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1STRING_1CREATE(JNIEnv *env, jclass cls){return H5P_STRING_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1ATTRIBUTE_1CREATE(JNIEnv *env, jclass cls){return H5P_ATTRIBUTE_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1OBJECT_1COPY(JNIEnv *env, jclass cls){return H5P_OBJECT_COPY;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1CREATE(JNIEnv *env, jclass cls){return H5P_LINK_CREATE;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1ACCESS(JNIEnv *env, jclass cls){return H5P_LINK_ACCESS;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_FILE_CREATE_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_FILE_ACCESS_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATASET_CREATE_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATASET_ACCESS_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1XFER_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATASET_XFER_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1MOUNT_1DEFAULT(JNIEnv *env, jclass cls){return H5P_FILE_MOUNT_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_GROUP_CREATE_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_GROUP_ACCESS_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATATYPE_CREATE_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATATYPE_ACCESS_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1ATTRIBUTE_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_ATTRIBUTE_CREATE_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1OBJECT_1COPY_1DEFAULT(JNIEnv *env, jclass cls){return H5P_OBJECT_COPY_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_LINK_CREATE_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_LINK_ACCESS_DEFAULT;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1CRT_1ORDER_1TRACKED(JNIEnv *env, jclass cls){return H5P_CRT_ORDER_TRACKED;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1CRT_1ORDER_1INDEXED(JNIEnv *env, jclass cls){return H5P_CRT_ORDER_INDEXED;}
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1DEFAULT(JNIEnv *env, jclass cls) { return H5P_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5P_1NO_1CLASS(JNIEnv *env, jclass cls) { return H5P_NO_CLASS; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5R_1BADTYPE(JNIEnv *env, jclass cls) { return H5R_BADTYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5R_1MAXTYPE(JNIEnv *env, jclass cls) { return H5R_MAXTYPE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5R_1OBJ_1REF_1BUF_1SIZE(JNIEnv *env, jclass cls) { return H5R_OBJ_REF_BUF_SIZE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5R_1DSET_1REG_1REF_1BUF_1SIZE(JNIEnv *env, jclass cls) { return H5R_DSET_REG_REF_BUF_SIZE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5R_1OBJECT(JNIEnv *env, jclass cls) { return H5R_OBJECT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5R_1DATASET_1REGION(JNIEnv *env, jclass cls) { return H5R_DATASET_REGION; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1ALL(JNIEnv *env, jclass cls) { return H5S_ALL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1MAX_1RANK(JNIEnv *env, jclass cls) { return H5S_MAX_RANK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1NO_1CLASS(JNIEnv *env, jclass cls) { return H5S_NO_CLASS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1NULL(JNIEnv *env, jclass cls) { return H5S_NULL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SCALAR(JNIEnv *env, jclass cls) { return H5S_SCALAR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1ALL(JNIEnv *env, jclass cls) { return H5S_SEL_ALL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1ERROR(JNIEnv *env, jclass cls) { return H5S_SEL_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1HYPERSLABS(JNIEnv *env, jclass cls) { return H5S_SEL_HYPERSLABS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1N(JNIEnv *env, jclass cls) { return H5S_SEL_N; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1NONE(JNIEnv *env, jclass cls) { return H5S_SEL_NONE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1POINTS(JNIEnv *env, jclass cls) { return H5S_SEL_POINTS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1AND(JNIEnv *env, jclass cls) { return H5S_SELECT_AND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1APPEND(JNIEnv *env, jclass cls) { return H5S_SELECT_APPEND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1INVALID(JNIEnv *env, jclass cls) { return H5S_SELECT_INVALID; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1NOOP(JNIEnv *env, jclass cls) { return H5S_SELECT_NOOP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1NOTA(JNIEnv *env, jclass cls) { return H5S_SELECT_NOTA; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1NOTB(JNIEnv *env, jclass cls) { return H5S_SELECT_NOTB; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1OR(JNIEnv *env, jclass cls) { return H5S_SELECT_OR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1PREPEND(JNIEnv *env, jclass cls) { return H5S_SELECT_PREPEND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1SET(JNIEnv *env, jclass cls) { return H5S_SELECT_SET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1XOR(JNIEnv *env, jclass cls) { return H5S_SELECT_XOR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1SIMPLE(JNIEnv *env, jclass cls) { return H5S_SIMPLE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5S_1UNLIMITED(JNIEnv *env, jclass cls) { return (jint)H5S_UNLIMITED; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B16(JNIEnv *env, jclass cls) { return H5T_ALPHA_B16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B32(JNIEnv *env, jclass cls) { return H5T_ALPHA_B32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B64(JNIEnv *env, jclass cls) { return H5T_ALPHA_B64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B8(JNIEnv *env, jclass cls) { return H5T_ALPHA_B8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1F32(JNIEnv *env, jclass cls) { return H5T_ALPHA_F32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1F64(JNIEnv *env, jclass cls) { return H5T_ALPHA_F64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I16(JNIEnv *env, jclass cls) { return H5T_ALPHA_I16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I32(JNIEnv *env, jclass cls) { return H5T_ALPHA_I32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I64(JNIEnv *env, jclass cls) { return H5T_ALPHA_I64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I8(JNIEnv *env, jclass cls) { return H5T_ALPHA_I8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U16(JNIEnv *env, jclass cls) { return H5T_ALPHA_U16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U32(JNIEnv *env, jclass cls) { return H5T_ALPHA_U32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U64(JNIEnv *env, jclass cls) { return H5T_ALPHA_U64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U8(JNIEnv *env, jclass cls) { return H5T_ALPHA_U8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ARRAY(JNIEnv *env, jclass cls) { return H5T_ARRAY; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1BITFIELD(JNIEnv *env, jclass cls) { return H5T_BITFIELD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1BKG_1NO(JNIEnv *env, jclass cls) { return H5T_BKG_NO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1BKG_1YES(JNIEnv *env, jclass cls) { return H5T_BKG_YES; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1C_1S1(JNIEnv *env, jclass cls) { return H5T_C_S1; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1COMPOUND(JNIEnv *env, jclass cls) { return H5T_COMPOUND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CONV_1CONV(JNIEnv *env, jclass cls) { return H5T_CONV_CONV; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CONV_1FREE(JNIEnv *env, jclass cls) { return H5T_CONV_FREE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CONV_1INIT(JNIEnv *env, jclass cls) { return H5T_CONV_INIT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1ERROR(JNIEnv *env, jclass cls) { return H5T_CSET_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1ASCII(JNIEnv *env, jclass cls) { return H5T_CSET_ASCII; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1UTF8(JNIEnv *env, jclass cls) { return H5T_CSET_UTF8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_110(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_10; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_111(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_11; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_112(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_12; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_113(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_13; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_114(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_14; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_115(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_15; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_12(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_2; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_13(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_3; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_14(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_4; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_15(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_5; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_16(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_6; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_17(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_7; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_18(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_19(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_9; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1DIR_1ASCEND(JNIEnv *env, jclass cls) { return H5T_DIR_ASCEND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1DIR_1DEFAULT(JNIEnv *env, jclass cls) { return H5T_DIR_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1DIR_1DESCEND(JNIEnv *env, jclass cls) { return H5T_DIR_DESCEND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ENUM(JNIEnv *env, jclass cls) { return H5T_ENUM; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1FLOAT(JNIEnv *env, jclass cls) { return H5T_FLOAT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1FORTRAN_1S1(JNIEnv *env, jclass cls) { return H5T_FORTRAN_S1; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F32BE(JNIEnv *env, jclass cls) { return H5T_IEEE_F32BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F32LE(JNIEnv *env, jclass cls) { return H5T_IEEE_F32LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F64BE(JNIEnv *env, jclass cls) { return H5T_IEEE_F64BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F64LE(JNIEnv *env, jclass cls) { return H5T_IEEE_F64LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEGER(JNIEnv *env, jclass cls) { return H5T_INTEGER; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B16(JNIEnv *env, jclass cls) { return H5T_INTEL_B16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B32(JNIEnv *env, jclass cls) { return H5T_INTEL_B32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B64(JNIEnv *env, jclass cls) { return H5T_INTEL_B64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B8(JNIEnv *env, jclass cls) { return H5T_INTEL_B8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1F32(JNIEnv *env, jclass cls) { return H5T_INTEL_F32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1F64(JNIEnv *env, jclass cls) { return H5T_INTEL_F64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I16(JNIEnv *env, jclass cls) { return H5T_INTEL_I16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I32(JNIEnv *env, jclass cls) { return H5T_INTEL_I32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I64(JNIEnv *env, jclass cls) { return H5T_INTEL_I64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I8(JNIEnv *env, jclass cls) { return H5T_INTEL_I8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U16(JNIEnv *env, jclass cls) { return H5T_INTEL_U16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U32(JNIEnv *env, jclass cls) { return H5T_INTEL_U32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U64(JNIEnv *env, jclass cls) { return H5T_INTEL_U64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U8(JNIEnv *env, jclass cls) { return H5T_INTEL_U8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B16(JNIEnv *env, jclass cls) { return H5T_MIPS_B16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B32(JNIEnv *env, jclass cls) { return H5T_MIPS_B32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B64(JNIEnv *env, jclass cls) { return H5T_MIPS_B64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B8(JNIEnv *env, jclass cls) { return H5T_MIPS_B8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1F32(JNIEnv *env, jclass cls) { return H5T_MIPS_F32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1F64(JNIEnv *env, jclass cls) { return H5T_MIPS_F64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I16(JNIEnv *env, jclass cls) { return H5T_MIPS_I16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I32(JNIEnv *env, jclass cls) { return H5T_MIPS_I32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I64(JNIEnv *env, jclass cls) { return H5T_MIPS_I64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I8(JNIEnv *env, jclass cls) { return H5T_MIPS_I8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U16(JNIEnv *env, jclass cls) { return H5T_MIPS_U16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U32(JNIEnv *env, jclass cls) { return H5T_MIPS_U32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U64(JNIEnv *env, jclass cls) { return H5T_MIPS_U64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U8(JNIEnv *env, jclass cls) { return H5T_MIPS_U8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B16(JNIEnv *env, jclass cls) { return H5T_NATIVE_B16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B32(JNIEnv *env, jclass cls) { return H5T_NATIVE_B32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B64(JNIEnv *env, jclass cls) { return H5T_NATIVE_B64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B8(JNIEnv *env, jclass cls) { return H5T_NATIVE_B8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1CHAR(JNIEnv *env, jclass cls) { return H5T_NATIVE_CHAR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1DOUBLE(JNIEnv *env, jclass cls) { return H5T_NATIVE_DOUBLE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1FLOAT(JNIEnv *env, jclass cls) { return H5T_NATIVE_FLOAT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HADDR(JNIEnv *env, jclass cls) { return H5T_NATIVE_HADDR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HBOOL(JNIEnv *env, jclass cls) { return H5T_NATIVE_HBOOL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HERR(JNIEnv *env, jclass cls) { return H5T_NATIVE_HERR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HSIZE(JNIEnv *env, jclass cls) { return H5T_NATIVE_HSIZE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HSSIZE(JNIEnv *env, jclass cls) { return H5T_NATIVE_HSSIZE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT16(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT32(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT64(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT8(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1LDOUBLE(JNIEnv *env, jclass cls) { return H5T_NATIVE_LDOUBLE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1LLONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_LLONG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1LONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_LONG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1OPAQUE(JNIEnv *env, jclass cls) { return H5T_NATIVE_OPAQUE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1SCHAR(JNIEnv *env, jclass cls) { return H5T_NATIVE_SCHAR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1SHORT(JNIEnv *env, jclass cls) { return H5T_NATIVE_SHORT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UCHAR(JNIEnv *env, jclass cls) { return H5T_NATIVE_UCHAR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT16(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT16; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT32(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT64(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT64; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT8(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1ULLONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_ULLONG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1ULONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_ULONG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1USHORT(JNIEnv *env, jclass cls) { return H5T_NATIVE_USHORT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NCLASSES(JNIEnv *env, jclass cls) { return H5T_NCLASSES; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NO_1CLASS(JNIEnv *env, jclass cls) { return H5T_NO_CLASS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1ERROR(JNIEnv *env, jclass cls) { return H5T_NORM_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1IMPLIED(JNIEnv *env, jclass cls) { return H5T_NORM_IMPLIED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1MSBSET(JNIEnv *env, jclass cls) { return H5T_NORM_MSBSET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1NONE(JNIEnv *env, jclass cls) { return H5T_NORM_NONE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NPAD(JNIEnv *env, jclass cls) { return H5T_NPAD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1NSGN(JNIEnv *env, jclass cls) { return H5T_NSGN; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1OPAQUE(JNIEnv *env, jclass cls) { return H5T_OPAQUE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1OPAQUE_1TAG_1MAX(JNIEnv *env, jclass cls) { return H5T_OPAQUE_TAG_MAX; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1BE(JNIEnv *env, jclass cls) { return H5T_ORDER_BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1ERROR(JNIEnv *env, jclass cls) { return H5T_ORDER_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1LE(JNIEnv *env, jclass cls) { return H5T_ORDER_LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1NONE(JNIEnv *env, jclass cls) { return H5T_ORDER_NONE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1VAX(JNIEnv *env, jclass cls) { return H5T_ORDER_VAX; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1BACKGROUND(JNIEnv *env, jclass cls) { return H5T_PAD_BACKGROUND; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1ERROR(JNIEnv *env, jclass cls) { return H5T_PAD_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1ONE(JNIEnv *env, jclass cls) { return H5T_PAD_ONE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1ZERO(JNIEnv *env, jclass cls) { return H5T_PAD_ZERO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PERS_1DONTCARE(JNIEnv *env, jclass cls) { return H5T_PERS_DONTCARE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PERS_1HARD(JNIEnv *env, jclass cls) { return H5T_PERS_HARD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1PERS_1SOFT(JNIEnv *env, jclass cls) { return H5T_PERS_SOFT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1REFERENCE(JNIEnv *env, jclass cls) { return H5T_REFERENCE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1SGN_12(JNIEnv *env, jclass cls) { return H5T_SGN_2; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1SGN_1ERROR(JNIEnv *env, jclass cls) { return H5T_SGN_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1SGN_1NONE(JNIEnv *env, jclass cls) { return H5T_SGN_NONE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B16BE(JNIEnv *env, jclass cls) { return H5T_STD_B16BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B16LE(JNIEnv *env, jclass cls) { return H5T_STD_B16LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B32BE(JNIEnv *env, jclass cls) { return H5T_STD_B32BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B32LE(JNIEnv *env, jclass cls) { return H5T_STD_B32LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B64BE(JNIEnv *env, jclass cls) { return H5T_STD_B64BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B64LE(JNIEnv *env, jclass cls) { return H5T_STD_B64LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B8BE(JNIEnv *env, jclass cls) { return H5T_STD_B8BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B8LE(JNIEnv *env, jclass cls) { return H5T_STD_B8LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I16BE(JNIEnv *env, jclass cls) { return H5T_STD_I16BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I16LE(JNIEnv *env, jclass cls) { return H5T_STD_I16LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I32BE(JNIEnv *env, jclass cls) { return H5T_STD_I32BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I32LE(JNIEnv *env, jclass cls) { return H5T_STD_I32LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I64BE(JNIEnv *env, jclass cls) { return H5T_STD_I64BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I64LE(JNIEnv *env, jclass cls) { return H5T_STD_I64LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I8BE(JNIEnv *env, jclass cls) { return H5T_STD_I8BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I8LE(JNIEnv *env, jclass cls) { return H5T_STD_I8LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1REF_1DSETREG(JNIEnv *env, jclass cls) { return H5T_STD_REF_DSETREG; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1REF_1OBJ(JNIEnv *env, jclass cls) { return H5T_STD_REF_OBJ; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U16BE(JNIEnv *env, jclass cls) { return H5T_STD_U16BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U16LE(JNIEnv *env, jclass cls) { return H5T_STD_U16LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U32BE(JNIEnv *env, jclass cls) { return H5T_STD_U32BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U32LE(JNIEnv *env, jclass cls) { return H5T_STD_U32LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U64BE(JNIEnv *env, jclass cls) { return H5T_STD_U64BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U64LE(JNIEnv *env, jclass cls) { return H5T_STD_U64LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U8BE(JNIEnv *env, jclass cls) { return H5T_STD_U8BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U8LE(JNIEnv *env, jclass cls) { return H5T_STD_U8LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1ERROR(JNIEnv *env, jclass cls) { return H5T_STR_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1NULLPAD(JNIEnv *env, jclass cls) { return H5T_STR_NULLPAD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1NULLTERM(JNIEnv *env, jclass cls) { return H5T_STR_NULLTERM; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_110(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_10; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_111(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_11; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_112(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_12; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_113(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_13; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_114(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_14; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_115(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_15; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_13(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_3; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_14(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_4; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_15(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_5; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_16(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_6; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_17(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_7; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_18(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_8; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_19(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_9; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STR_1SPACEPAD(JNIEnv *env, jclass cls) { return H5T_STR_SPACEPAD; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1STRING(JNIEnv *env, jclass cls) { return H5T_STRING; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1TIME(JNIEnv *env, jclass cls) { return H5T_TIME; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D32BE(JNIEnv *env, jclass cls) { return H5T_UNIX_D32BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D32LE(JNIEnv *env, jclass cls) { return H5T_UNIX_D32LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D64BE(JNIEnv *env, jclass cls) { return H5T_UNIX_D64BE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D64LE(JNIEnv *env, jclass cls) { return H5T_UNIX_D64LE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1VARIABLE(JNIEnv *env, jclass cls) { return (int)H5T_VARIABLE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5T_1VLEN(JNIEnv *env, jclass cls) { return H5T_VLEN; }
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1CONT(JNIEnv *env, jclass cls) { return H5Z_CB_CONT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1ERROR(JNIEnv *env, jclass cls) { return H5Z_CB_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1FAIL(JNIEnv *env, jclass cls) { return H5Z_CB_FAIL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1NO(JNIEnv *env, jclass cls) { return H5Z_CB_NO; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1DISABLE_1EDC(JNIEnv *env, jclass cls) { return H5Z_DISABLE_EDC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1ENABLE_1EDC(JNIEnv *env, jclass cls) { return H5Z_ENABLE_EDC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1ERROR_1EDC(JNIEnv *env, jclass cls) { return H5Z_ERROR_EDC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1CONFIG_1DECODE_1ENABLED(JNIEnv *env, jclass cls) { return H5Z_FILTER_CONFIG_DECODE_ENABLED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1CONFIG_1ENCODE_1ENABLED(JNIEnv *env, jclass cls) { return H5Z_FILTER_CONFIG_ENCODE_ENABLED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1DEFLATE(JNIEnv *env, jclass cls) { return H5Z_FILTER_DEFLATE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1ERROR(JNIEnv *env, jclass cls) { return H5Z_FILTER_ERROR; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1FLETCHER32(JNIEnv *env, jclass cls) { return H5Z_FILTER_FLETCHER32; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1MAX(JNIEnv *env, jclass cls) { return H5Z_FILTER_MAX; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1NBIT(JNIEnv *env, jclass cls) { return H5Z_FILTER_NBIT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1NONE(JNIEnv *env, jclass cls) { return H5Z_FILTER_NONE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1RESERVED(JNIEnv *env, jclass cls) { return H5Z_FILTER_RESERVED; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1SCALEOFFSET(JNIEnv *env, jclass cls) { return H5Z_FILTER_SCALEOFFSET; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1SHUFFLE(JNIEnv *env, jclass cls) { return H5Z_FILTER_SHUFFLE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1SZIP(JNIEnv *env, jclass cls) { return H5Z_FILTER_SZIP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1DEFMASK(JNIEnv *env, jclass cls) { return H5Z_FLAG_DEFMASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1INVMASK(JNIEnv *env, jclass cls) { return H5Z_FLAG_INVMASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1MANDATORY(JNIEnv *env, jclass cls) { return H5Z_FLAG_MANDATORY; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1OPTIONAL(JNIEnv *env, jclass cls) { return H5Z_FLAG_OPTIONAL; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1REVERSE(JNIEnv *env, jclass cls) { return H5Z_FLAG_REVERSE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1SKIP_1EDC(JNIEnv *env, jclass cls) { return H5Z_FLAG_SKIP_EDC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1MAX_1NFILTERS(JNIEnv *env, jclass cls) { return H5Z_MAX_NFILTERS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1NO_1EDC(JNIEnv *env, jclass cls) { return H5Z_NO_EDC; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1INT_1MINBITS_1DEFAULT(JNIEnv *env, jclass cls) { return H5Z_SO_INT_MINBITS_DEFAULT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1FLOAT_1DSCALE(JNIEnv *env, jclass cls) { return H5Z_SO_FLOAT_DSCALE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1FLOAT_1ESCALE(JNIEnv *env, jclass cls) { return H5Z_SO_FLOAT_ESCALE; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1INT(JNIEnv *env, jclass cls) { return H5Z_SO_INT; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SHUFFLE_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SHUFFLE_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SHUFFLE_1TOTAL_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SHUFFLE_TOTAL_NPARMS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SZIP_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1TOTAL_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SZIP_TOTAL_NPARMS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1MASK(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_MASK; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1PPB(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_PPB; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1BPP(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_BPP; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1PPS(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_PPS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1NBIT_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_NBIT_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1SCALEOFFSET_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SCALEOFFSET_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1ALL(JNIEnv *env, jclass cls) { return H5Z_FILTER_ALL; }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/source/c/hdf-java/h5Imp.c b/source/c/hdf-java/h5Imp.c
new file mode 100755
index 0000000..c1a373c
--- /dev/null
+++ b/source/c/hdf-java/h5Imp.c
@@ -0,0 +1,212 @@
+/****************************************************************************
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ ****************************************************************************/
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  general library functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
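+
+/*
+ *  Note on symbol names: JNI escapes '_' in a Java method name as "_1" in
+ *  the exported C symbol.  For example, a Java declaration along the lines
+ *  of (illustrative sketch, not the exact wrapper source):
+ *
+ *      public static native int H5dont_atexit();
+ *
+ *  binds to Java_ncsa_hdf_hdf5lib_H5_H5dont_1atexit below.
+ */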
+
+#include "hdf5.h"
+#include <jni.h>
+#include "h5jni.h"
+/*
+#include <signal.h>
+*/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5open
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5open
+  (JNIEnv *env, jclass clss)
+{
+    herr_t retVal = -1;
+    retVal =  H5open();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5close
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5close
+  (JNIEnv *env, jclass clss)
+{
+    herr_t retVal = -1;
+    retVal =  H5close();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5dont_atexit
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5dont_1atexit
+  (JNIEnv *env, jclass clss)
+{
+    int retVal = H5dont_atexit();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5get_libversion
+ * Signature: ([I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5get_1libversion
+  (JNIEnv *env, jclass clss, jintArray libversion)
+{
+    unsigned *theArray = NULL;
+    jboolean isCopy;
+    int status;
+
+    if (libversion == NULL) {
+        h5nullArgument( env, "H5get_version:  libversion is NULL");
+        return -1;
+    }
+
+    theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR libversion,&isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5get_libversion:  input not pinned");
+        return -1;
+    }
+
+    status =  H5get_libversion(&(theArray[0]), &(theArray[1]), &(theArray[2]));
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR libversion,(jint *)theArray,JNI_ABORT);
+        h5libraryError(env);
+    }
+    else {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR libversion,(jint *)theArray,0);
+    }
+    return (jint)status;
+}
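+
+/*
+ *  Sketch of the expected Java-side call (illustrative; the exact wrapper
+ *  declaration in H5.java may differ):
+ *
+ *      int[] v = new int[3];
+ *      H5.H5get_libversion(v);   // fills { majnum, minnum, relnum }
+ */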
+
+#ifdef notdef
+/*
+ struct sigaction {
+   int sa_flags;
+     void (*sa_handler)();
+     sigset_t sa_mask;
+     void (*sa_sigaction)(int, siginfo_t *, void *);
+};
+int sigaction(int sig, struct sigaction *act, struct sigaction *oact);
+*/
+void catch_abrt()
+{
+    /*  Raise Java exception */
+    printf("raise exception....\n");
+}
+#endif
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5check_version
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5check_1version
+  (JNIEnv *env, jclass clss, jint majnum, jint minnum, jint relnum)
+{
+    int status;
+/*
+ *   In principle, we want to catch the 'abort' signal, and
+ *  do something other than crash.
+ *   Look up how to do this portably.
+ */
+/*
+    int res;
+    struct sigaction ctchit;
+    struct sigaction old;
+    ctchit.sa_handler = catch_abrt;
+*/
+
+/*
+    res = sigaction(SIGABRT, &ctchit, &old);
+    if (res != 0) {
+        printf("sigaction failed\n");
+        return(-1);
+    }
+*/
+    /*  catch the signal? */
+    status = H5check_version((unsigned)majnum, (unsigned)minnum, (unsigned)relnum);
+/*
+    res = sigaction(SIGABRT, &old, 0);
+    if (res != 0) {
+        printf("sigaction failed\n");
+        return(-1);
+    }
+*/
+    return status;
+}
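+
+/*
+ *  Note (per the HDF5 documentation, stated here as an assumption): on a
+ *  version mismatch H5check_version() may abort() the process, which is
+ *  why the disabled SIGABRT handling is sketched above.
+ */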
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5garbage_collect
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5garbage_1collect
+  (JNIEnv *env, jclass clss)
+{
+    herr_t retVal = -1;
+    retVal =  H5garbage_collect();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5set_free_list_limits
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5set_1free_1list_1limits
+  (JNIEnv *env, jclass clss, jint reg_global_lim, jint reg_list_lim,
+  jint arr_global_lim, jint arr_list_lim, jint blk_global_lim, jint blk_list_lim )
+{
+    int retVal = H5set_free_list_limits((int)reg_global_lim, (int)reg_list_lim,
+        (int)arr_global_lim, (int)arr_list_lim, (int)blk_global_lim, (int)blk_list_lim);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return retVal;
+}
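+
+/*
+ *  Sketch of a Java-side call (illustrative; per the HDF5 reference, a
+ *  limit value of -1 is assumed to mean "no limit" for that free list):
+ *
+ *      H5.H5set_free_list_limits(-1, -1, -1, -1, -1, -1);
+ */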
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5aImp.c b/source/c/hdf-java/h5aImp.c
new file mode 100755
index 0000000..bedc45f
--- /dev/null
+++ b/source/c/hdf-java/h5aImp.c
@@ -0,0 +1,1456 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Attribute API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include "h5util.h"
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+#include "h5jni.h"
+
+herr_t H5AreadVL_str (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+herr_t H5AreadVL_num (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+herr_t H5AreadVL_comp (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+
+herr_t H5AwriteVL_str (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+herr_t H5AwriteVL_num (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+herr_t H5AwriteVL_comp (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Acreate
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Acreate
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint create_plist)
+{
+    hid_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Acreate:  name is NULL");
+        return -1;
+    }
+
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Acreate: aName is not pinned");
+        return -1;
+    }
+
+    status = H5Acreate2((hid_t)loc_id, aName, (hid_t)type_id,
+        (hid_t)space_id, (hid_t)create_plist, (hid_t)H5P_DEFAULT );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aopen_name
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aopen_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    hid_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env,"H5Aopen_name:  name is NULL");
+        return -1;
+    }
+
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (aName == NULL) {
+        h5JNIFatalError( env,"H5Aopen_name: name is not pinned");
+        return -1;
+    }
+
+    status = H5Aopen_name((hid_t)loc_id, aName);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aopen_idx
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aopen_1idx
+  (JNIEnv *env, jclass clss, jint loc_id, jint idx)
+{
+    hid_t retVal = -1;
+    retVal =  H5Aopen_idx((hid_t)loc_id, (unsigned int) idx );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Awrite
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR buf,&isCopy);
+
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR buf, byteP,JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5AwriteVL
+ * Signature: (II[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5AwriteVL
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jobjectArray buf)
+{
+    herr_t status;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5AwriteVL:  buf is NULL");
+        return -1;
+    }
+
+    if (H5Tis_variable_str((hid_t)mem_type_id) > 0) {
+        status = H5AwriteVL_str (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else if (H5Tget_class((hid_t)mem_type_id) == H5T_COMPOUND) {
+        status = H5AwriteVL_comp (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else if (H5Tget_class((hid_t)mem_type_id) == H5T_ARRAY) {
+        status = H5AwriteVL_comp (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else {
+        status = H5AwriteVL_num (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+
+    return (jint)status;
+}
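+
+/*
+ *  Dispatch note: variable-length strings go to H5AwriteVL_str; compound
+ *  and array types go to H5AwriteVL_comp; everything else falls through
+ *  to H5AwriteVL_num.  The _num and _comp writers below are stubs that
+ *  raise an "unimplemented" error and return -1.
+ */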
+
+herr_t H5AwriteVL_num (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t      status;
+
+    h5unimplemented(env, "H5AwriteVL_num:  not implemented");
+    status = -1;
+
+    return status;
+}
+
+herr_t H5AwriteVL_comp (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t      status;
+
+    h5unimplemented(env, "H5AwriteVL_comp:  not implemented");
+    status = -1;
+
+    return status;
+}
+
+herr_t H5AwriteVL_str (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t  status = -1;
+    char  **wdata;
+    jsize   size;
+    jint    i;
+
+    size = ENVPTR->GetArrayLength(ENVPAR (jarray) buf);
+
+    wdata = (char**)malloc(size * sizeof (char*));
+    if (!wdata) {
+        h5JNIFatalError(env, "H5AwriteVL_str:  cannot allocate buffer");
+        return -1;
+    }
+
+    memset(wdata, 0, size * sizeof(char*));
+    for (i = 0; i < size; ++i) {
+        jstring obj = (jstring) ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray) buf, i);
+        if (obj != 0) {
+            jsize length = ENVPTR->GetStringUTFLength(ENVPAR obj);
+            const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+            if (utf8) {
+                wdata[i] = (char*)malloc(length + 1);
+                if (wdata[i]) {
+                    memset(wdata[i], 0, (length + 1));
+                    strncpy(wdata[i], utf8, length);
+                }
+            }
+
+            ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+            ENVPTR->DeleteLocalRef(ENVPAR obj);
+        }
+    } /*for (i = 0; i < size; ++i) */
+
+    status = H5Awrite((hid_t)aid, (hid_t)tid, wdata);
+
+    /* now free memory */
+    for (i = 0; i < size; i++) {
+       if(wdata[i]) {
+           free(wdata[i]);
+       }
+    }
+    free(wdata);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
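+
+/*
+ *  Sketch of the Java-side call for the string branch (illustrative; it
+ *  assumes mem_type_id is a variable-length string datatype):
+ *
+ *      String[] data = { "alpha", "beta" };
+ *      H5.H5AwriteVL(attr_id, str_type_id, data);
+ */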
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aread
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR buf,&isCopy);
+
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf,byteP,JNI_ABORT);
+        h5libraryError(env);
+    }
+    else  {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf,byteP,0);
+    }
+
+    return (jint)status;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_space
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aget_1space
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Aget_space((hid_t)attr_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aget_1type
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Aget_type((hid_t)attr_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_name
+ * Signature: (IJ[Ljava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1name
+  (JNIEnv *env, jclass clss, jint attr_id, jlong buf_size, jobjectArray name)
+{
+    char *aName;
+    jstring str;
+    hssize_t size;
+    long bs;
+
+    if (buf_size==0 && name == NULL)
+      return (jlong) H5Aget_name((hid_t)attr_id, 0, NULL);
+
+    bs = (long)buf_size;
+    if (bs <= 0) {
+        h5badArgument( env, "H5Aget_name:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Aget_name:  malloc failed");
+        return -1;
+    }
+    size = H5Aget_name((hid_t)attr_id, (size_t)buf_size, aName);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;
+        /*  exception, returns immediately */
+    }
+    /* successful return -- save the string; */
+
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+
+    if (str == NULL) {
+        free(aName);
+        h5JNIFatalError( env,"H5Aget_name:  return string failed");
+        return -1;
+    }
+    free(aName);
+    /*  Note: throws ArrayIndexOutOfBoundsException,
+        ArrayStoreException */
+
+    ENVPTR->SetObjectArrayElement(ENVPAR name,0,str);
+
+    return (jlong)size;
+}
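+
+/*
+ *  Sketch of the usual two-step Java-side calling pattern (illustrative):
+ *  first query the name length with a NULL buffer, then retrieve it:
+ *
+ *      long n = H5.H5Aget_name(attr_id, 0, null);
+ *      String[] name = new String[1];
+ *      H5.H5Aget_name(attr_id, n + 1, name);
+ */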
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_num_attrs
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1num_1attrs
+  (JNIEnv *env, jclass clss, jint loc_id)
+{
+    int retVal = -1;
+    retVal =  H5Aget_num_attrs((hid_t)loc_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Adelete
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Adelete
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env,"H5Adelete:  name is NULL");
+        return -1;
+    }
+
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (aName == NULL) {
+        h5JNIFatalError( env,"H5Adelete: name is not pinned");
+        return -1;
+    }
+
+    status = H5Adelete((hid_t)loc_id, aName );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aclose
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    herr_t retVal = 0;
+
+    if (attr_id > 0)
+        retVal =  H5Aclose((hid_t)attr_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
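+
+/*
+ *  Note: a non-positive attr_id is treated as already closed; the wrapper
+ *  returns 0 without calling into the library.
+ */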
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5AreadVL
+ * Signature: (II[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5AreadVL
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jobjectArray buf)
+{
+    htri_t isStr;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5AreadVL:  buf is NULL");
+        return -1;
+    }
+
+    isStr = H5Tis_variable_str((hid_t)mem_type_id);
+
+    if (isStr > 0) {
+        return (jint) H5AreadVL_str (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else if (H5Tget_class((hid_t)mem_type_id) == H5T_COMPOUND) {
+        return (jint) H5AreadVL_comp (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else if (H5Tget_class((hid_t)mem_type_id) == H5T_ARRAY) {
+        return (jint) H5AreadVL_comp (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else {
+        return (jint) H5AreadVL_num (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+}
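+
+/*
+ *  Dispatch note: variable-length strings are returned directly as Java
+ *  Strings by H5AreadVL_str; compound, array, and other variable-length
+ *  data are rendered to their text representation (via h5str_sprintf) by
+ *  H5AreadVL_comp and H5AreadVL_num below.
+ */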
+
+herr_t H5AreadVL_num (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t  status;
+    int     i;
+    int     n;
+    size_t  max_len = 0;
+    h5str_t h5str;
+    jstring jstr;
+    hvl_t  *rdata = NULL;
+    size_t  size;
+    hid_t   sid;
+    hsize_t dims[H5S_MAX_RANK];
+
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+    rdata = (hvl_t *)calloc(n+1, sizeof(hvl_t));
+    if (rdata == NULL) {
+        h5JNIFatalError( env, "H5AreadVL_num:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Aread(aid, tid, rdata);
+    dims[0] = n;
+    sid = H5Screate_simple(1, dims, NULL);
+    if (status < 0) {
+        H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+        H5Sclose(sid);
+        free(rdata);
+        h5JNIFatalError(env, "H5AreadVL_num: failed to read data");
+        return -1;
+    }
+
+    for (i = 0; i < n; i++) {
+        if ((rdata +i)->len > max_len)
+            max_len = (rdata + i)->len;
+    }
+
+    size = H5Tget_size(tid);
+    memset((void *)&h5str, (int)0, (size_t)sizeof(h5str_t));
+    h5str_new(&h5str, 4*size);
+
+    if (h5str.s == NULL) {
+        H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+        H5Sclose(sid);
+        free(rdata);
+        h5JNIFatalError(env, "H5AreadVL_num:  failed to allocate strng buf");
+        return -1;
+    }
+
+    for (i = 0; i < n; i++) {
+        h5str.s[0] = '\0';
+        h5str_sprintf(&h5str, aid, tid, rdata + i, 0);
+        jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+    }
+
+    h5str_free(&h5str);
+    H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+    H5Sclose(sid);
+
+    free(rdata);
+
+    return status;
+}
+
+herr_t H5AreadVL_comp (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t      status;
+    int         i;
+    int         n;
+    size_t      max_len = 0;
+    h5str_t     h5str;
+    jstring     jstr;
+    char       *rdata;
+    size_t      size;
+    hid_t       p_type;
+
+    p_type = H5Tget_native_type(tid, H5T_DIR_DEFAULT);
+    size = (((H5Tget_size(tid))>(H5Tget_size(p_type))) ? (H5Tget_size(tid)) : (H5Tget_size(p_type)));
+    H5Tclose(p_type);
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+    rdata = (char *)malloc(n * size);
+
+    if (rdata == NULL) {
+        h5JNIFatalError(env, "H5AreadVL_comp:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Aread(aid, tid, rdata);
+
+    if (status < 0) {
+        free(rdata);
+        h5JNIFatalError(env, "H5AreadVL_comp: failed to read data");
+        return -1;
+    }
+
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new(&h5str, 4 * size);
+
+    if (h5str.s == NULL) {
+        free(rdata);
+        h5JNIFatalError(env, "H5AreadVL_comp:  failed to allocate string buf");
+        return -1;
+    }
+
+    for (i = 0; i < n; i++) {
+        h5str.s[0] = '\0';
+        h5str_sprintf(&h5str, aid, tid, rdata + i * size, 0);
+        jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+    }
+
+    h5str_free(&h5str);
+
+    free(rdata);
+
+    return status;
+}
+
+herr_t H5AreadVL_str (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t status=-1;
+    jstring jstr;
+    char **strs;
+    int i, n;
+    hid_t sid;
+    hsize_t dims[H5S_MAX_RANK];
+
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+
+    strs =(char **)malloc(n*sizeof(char *));
+    if (strs == NULL) {
+        h5JNIFatalError( env, "H5AreadVL_str:  failed to allocate buff for read variable length strings");
+        return -1;
+    }
+
+    status = H5Aread(aid, tid, strs);
+    if (status < 0) {
+        dims[0] = n;
+        sid = H5Screate_simple(1, dims, NULL);
+        H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, strs);
+        H5Sclose(sid);
+        free(strs);
+        h5JNIFatalError(env, "H5AreadVL_str: failed to read variable length strings");
+        return -1;
+    }
+
+    for (i=0; i<n; i++) {
+        jstr = ENVPTR->NewStringUTF(ENVPAR strs[i]);
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+        free (strs[i]);
+    }
+
+    /*
+    For repeated reads of an attribute with a very large number of strings
+    (e.g., 1,000,000), H5Dvlen_reclaim() may crash on Windows because the
+    Java GC cannot free memory quickly enough.  Instead, each string is
+    freed individually with "free(strs[i])" above, once it has been copied
+    into a Java String.
+    H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, strs);
+    */
+
+    if (strs)
+        free(strs);
+
+    return status;
+}
+
+/*
+ * Copies the content of one attribute to another attribute
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Acopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Acopy
+  (JNIEnv *env, jclass clss, jint src_id, jint dst_id)
+{
+    jbyte *buf;
+    herr_t retVal = -1;
+    hid_t src_did = (hid_t)src_id;
+    hid_t dst_did = (hid_t)dst_id;
+    hid_t tid=-1;
+    hid_t sid=-1;
+    hsize_t total_size = 0;
+
+
+    sid = H5Aget_space(src_did);
+    if (sid < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    tid = H5Aget_type(src_did);
+    if (tid < 0) {
+        H5Sclose(sid);
+        h5libraryError(env);
+        return -1;
+    }
+
+    total_size = H5Sget_simple_extent_npoints(sid) * H5Tget_size(tid);
+
+    H5Sclose(sid);
+
+    buf = (jbyte *)malloc( (int) (total_size * sizeof(jbyte)));
+    if (buf == NULL) {
+        H5Tclose(tid);
+        h5outOfMemory( env, "H5Acopy:  malloc failed");
+        return -1;
+    }
+
+    retVal = H5Aread(src_did, tid, buf);
+    H5Tclose(tid);
+
+    if (retVal < 0) {
+        free(buf);
+        h5libraryError(env);
+        return (jint)retVal;
+    }
+
+    tid = H5Aget_type(dst_did);
+    if (tid < 0) {
+        free(buf);
+        h5libraryError(env);
+        return -1;
+    }
+    retVal = H5Awrite(dst_did, tid, buf);
+    H5Tclose(tid);
+    free(buf);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
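+
+/*
+ *  Note: the copy above is a raw read/write round trip -- the data is read
+ *  with the source attribute's datatype and written with the destination's,
+ *  so it assumes both attributes have compatible dataspaces and datatype
+ *  sizes.
+ */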
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Acreate2
+ * Signature: (ILjava/lang/String;IIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Acreate2
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint create_plist, jint access_plist)
+{
+    hid_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Acreate2:  name is NULL");
+        return -1;
+    }
+
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Acreate2: aName is not pinned");
+        return -1;
+    }
+
+    status = H5Acreate2((hid_t)loc_id, aName, (hid_t)type_id,
+        (hid_t)space_id, (hid_t)create_plist, (hid_t)access_plist );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.8.0                               *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Acreate2
+ * Signature: (ILjava/lang/String;IIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Acreate2
+(JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint create_plist, jint access_plist)
+{
+    hid_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Acreate2:  name is NULL");
+        return -1;
+    }
+
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Acreate2: aName is not pinned");
+        return -1;
+    }
+
+    status = H5Acreate2((hid_t)loc_id, aName, (hid_t)type_id,
+        (hid_t)space_id, (hid_t)create_plist, (hid_t)access_plist );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Aopen
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aopen
+  (JNIEnv *env, jclass clss, jint obj_id, jstring name, jint access_plist)
+{
+    hid_t retVal;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Aopen:  name is NULL");
+        return -1;
+    }
+
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Aopen: aName is not pinned");
+        return -1;
+    }
+
+    retVal = H5Aopen((hid_t)obj_id, aName, (hid_t)access_plist);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Aopen_by_idx
+ * Signature: (ILjava/lang/String;IIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aopen_1by_1idx
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint idx_type, jint order, jlong n, jint aapl_id, jint lapl_id)
+{
+  hid_t retVal;
+  char* aName;
+  jboolean isCopy;
+
+  if (name == NULL) {
+    h5nullArgument( env, "H5Aopen_by_idx:  name is NULL");
+    return -1;
+  }
+
+  aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+
+  if (aName == NULL) {
+    h5JNIFatalError( env, "H5Aopen_by_idx: aName is not pinned");
+    return -1;
+  }
+
+  retVal = H5Aopen_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+    (H5_iter_order_t)order, (hsize_t)n, (hid_t)aapl_id, (hid_t)lapl_id);
+
+  ENVPTR->ReleaseStringUTFChars(ENVPAR name,aName);
+
+  if (retVal < 0) {
+    h5libraryError(env);
+  }
+  return (jint)retVal;
+}
+
+/*
+* Class:     ncsa_hdf_hdf5lib_H5
+* Method:    _H5Acreate_by_name
+* Signature: (ILjava/lang/String;Ljava/lang/String;IIIII)I
+*/
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Acreate_1by_1name
+(JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jstring attr_name, jint type_id, jint space_id, jint acpl_id, jint aapl_id, jint lapl_id)
+{
+  hid_t retVal;
+  char *aName, *attrName;
+  jboolean isCopy;
+
+  if (obj_name == NULL) {
+    h5nullArgument( env, "H5Acreate_by_name:  object name is NULL");
+    return -1;
+  }
+  if (attr_name == NULL) {
+    h5nullArgument( env, "H5Acreate_by_name:  attribute name is NULL");
+    return -1;
+  }
+  aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+  if (aName == NULL) {
+    h5JNIFatalError( env, "H5Acreate_by_name: aName is not pinned");
+    return -1;
+  }
+  attrName = (char *)ENVPTR->GetStringUTFChars(ENVPAR attr_name, &isCopy);
+  if (attrName == NULL) {
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+    h5JNIFatalError( env, "H5Acreate_by_name: attrName is not pinned");
+    return -1;
+  }
+
+  retVal = H5Acreate_by_name((hid_t)loc_id, aName, attrName, (hid_t)type_id,
+    (hid_t)space_id, (hid_t)acpl_id, (hid_t)aapl_id, (hid_t)lapl_id);
+
+  ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+  ENVPTR->ReleaseStringUTFChars(ENVPAR attr_name,attrName);
+
+  if (retVal < 0) {
+    h5libraryError(env);
+  }
+  return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aexists_by_name
+ * Signature: (ILjava/lang/String;Ljava/lang/String;I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aexists_1by_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jstring attr_name, jint lapl_id)
+{
+    htri_t retVal;
+    char *aName, *attrName;
+    jboolean isCopy;
+
+    if (obj_name == NULL) {
+        h5nullArgument( env, "H5Aexists_by_name:  object name is NULL");
+        return JNI_FALSE;
+    }
+    if (attr_name == NULL) {
+        h5nullArgument( env, "H5Aexists_by_name:  attribute name is NULL");
+        return JNI_FALSE;
+    }
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Aexists_by_name: aName is not pinned");
+        return JNI_FALSE;
+    }
+    attrName = (char *)ENVPTR->GetStringUTFChars(ENVPAR attr_name, &isCopy);
+    if (attrName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+        h5JNIFatalError( env, "H5Aexists_by_name: attrName is not pinned");
+        return JNI_FALSE;
+    }
+
+    retVal = H5Aexists_by_name((hid_t)loc_id, aName, attrName, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR attr_name,attrName);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+        return JNI_FALSE; /* returning -1 would cast to a non-zero (true) jboolean */
+    }
+    return (jboolean)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Arename
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Arename
+  (JNIEnv *env, jclass clss, jint loc_id, jstring old_attr_name, jstring new_attr_name)
+{
+    herr_t retVal;
+    char *oName, *nName;
+    jboolean isCopy;
+
+    if (old_attr_name == NULL) {
+        h5nullArgument( env, "H5Arename:  old_attr_name is NULL");
+        return -1;
+    }
+    if (new_attr_name == NULL) {
+        h5nullArgument( env, "H5Arename:  new_attr_name is NULL");
+        return -1;
+    }
+
+    oName = (char *)ENVPTR->GetStringUTFChars(ENVPAR old_attr_name,&isCopy);
+    if (oName == NULL) {
+        h5JNIFatalError( env, "H5Arename:  old_attr_name not pinned");
+        return -1;
+    }
+    nName = (char *)ENVPTR->GetStringUTFChars(ENVPAR new_attr_name,&isCopy);
+    if (nName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR old_attr_name,oName);
+        h5JNIFatalError( env, "H5Arename:  new_attr_name not pinned");
+        return -1;
+    }
+
+    retVal = H5Arename((hid_t)loc_id, oName, nName);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR old_attr_name,oName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR new_attr_name,nName);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Arename_by_name
+ * Signature: (ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Arename_1by_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jstring old_attr_name, jstring new_attr_name, jint lapl_id)
+{
+  herr_t retVal;
+  char *aName, *oName, *nName;
+  jboolean isCopy;
+
+  if (obj_name == NULL) {
+    h5nullArgument( env, "H5Arename_by_name:  object name is NULL");
+    return -1;
+  }
+  if (old_attr_name == NULL) {
+    h5nullArgument( env, "H5Arename_by_name:  old_attr_name is NULL");
+    return -1;
+  }
+  if (new_attr_name == NULL) {
+    h5nullArgument( env, "H5Arename_by_name:  new_attr_name is NULL");
+    return -1;
+  }
+
+  aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+  if (aName == NULL) {
+    h5JNIFatalError( env, "H5Arename_by_name: object name is not pinned");
+    return -1;
+  }
+  oName = (char *)ENVPTR->GetStringUTFChars(ENVPAR old_attr_name,&isCopy);
+  if (oName == NULL) {
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+    h5JNIFatalError( env, "H5Arename_by_name:  old_attr_name not pinned");
+    return -1;
+  }
+  nName = (char *)ENVPTR->GetStringUTFChars(ENVPAR new_attr_name,&isCopy);
+  if (nName == NULL) {
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR old_attr_name,oName);
+    h5JNIFatalError( env, "H5Arename_by_name:  new_attr_name not pinned");
+    return -1;
+  }
+
+  retVal = H5Arename_by_name((hid_t)loc_id, aName, oName, nName, (hid_t)lapl_id);
+
+  ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+  ENVPTR->ReleaseStringUTFChars(ENVPAR old_attr_name,oName);
+  ENVPTR->ReleaseStringUTFChars(ENVPAR new_attr_name,nName);
+
+  if (retVal < 0) {
+    h5libraryError(env);
+  }
+  return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_name_by_idx
+ * Signature: (ILjava/lang/String;IIJI)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1name_1by_1idx
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jint idx_type, jint order, jlong n, jint lapl_id)
+{
+  size_t   buf_size;
+  char    *aName;
+  char    *aValue;
+  jboolean isCopy;
+  jlong    status_size;
+  jstring  str = NULL;
+
+  if (obj_name == NULL) {
+    h5nullArgument( env, "H5Aget_name_by_idx:  object name is NULL");
+    return NULL;
+  }
+  aName = (char*)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+  if (aName == NULL) {
+    h5JNIFatalError( env, "H5Aget_name_by_idx:  name not pinned");
+    return NULL;
+  }
+
+  /* get the length of the attribute name */
+  status_size = H5Aget_name_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+    (H5_iter_order_t) order, (hsize_t) n, (char*)NULL, (size_t)0, (hid_t)lapl_id);
+
+  if(status_size < 0) {
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+    h5libraryError(env);
+    return NULL;
+  }
+  buf_size = (size_t)status_size + 1; /* add extra space for the null terminator */
+
+  aValue = (char*)malloc(sizeof(char) * buf_size);
+  if (aValue == NULL) {
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+    h5outOfMemory( env, "H5Aget_name_by_idx:  malloc failed ");
+    return NULL;
+  }
+
+  status_size = H5Aget_name_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+    (H5_iter_order_t) order, (hsize_t) n, (char*)aValue, (size_t)buf_size, (hid_t)lapl_id);
+
+  ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+
+  if (status_size < 0) {
+    free(aValue);
+    h5libraryError(env);
+    return NULL;
+  }
+  /* may throw OutOfMemoryError */
+  str = ENVPTR->NewStringUTF(ENVPAR aValue);
+  if (str == NULL) {
+    /* exception -- fatal JNI error */
+    free(aValue);
+    h5JNIFatalError( env, "H5Aget_name_by_idx:  return string not created");
+    return NULL;
+  }
+
+  free(aValue);
+
+  return str;
+}
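+
+/*
+ * Note (illustrative sketch, not part of the upstream change): the function
+ * above uses HDF5's two-call idiom -- query the required length with a NULL
+ * buffer, allocate length + 1 bytes for the terminator, then call again to
+ * fetch the value.  The same pattern with H5Aget_name, for example:
+ *
+ *     ssize_t len = H5Aget_name(aid, 0, NULL);       // first call: size only
+ *     char *name = (char*)malloc((size_t)len + 1);
+ *     if (name != NULL) {
+ *         H5Aget_name(aid, (size_t)len + 1, name);   // second call: fetch
+ *         free(name);
+ *     }
+ */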
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_storage_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1storage_1size
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+  hsize_t retVal = (hsize_t)-1;
+
+    retVal = H5Aget_storage_size((hid_t)attr_id);
+/* H5Aget_storage_size returns 0 on failure, so no exception is raised here:
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+*/
+    return (jlong)retVal;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_info
+ * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1info
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    herr_t     status;
+    H5A_info_t ainfo;
+    jclass     cls;
+    jmethodID  constructor;
+    jvalue     args[4];
+    jobject    ret_info_t = NULL;
+
+    status = H5Aget_info((hid_t)attr_id, (H5A_info_t*)&ainfo);
+
+    if (status < 0) {
+       h5libraryError(env);
+       return NULL;
+    }
+
+    // get a reference to your class if you don't have it already
+    cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5A_info_t");
+    // get a reference to the constructor; the name is <init>
+    constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(ZJIJ)V");
+    args[0].z = ainfo.corder_valid;
+    args[1].j = ainfo.corder;
+    args[2].i = ainfo.cset;
+    args[3].j = ainfo.data_size;
+    ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+    return ret_info_t;
+
+}
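+
+/*
+ * Note (illustrative sketch, not part of the upstream change): the lookup
+ * above maps the H5A_info_t fields onto the Java constructor descriptor
+ * "(ZJIJ)V", i.e. (boolean corder_valid, long corder, int cset,
+ * long data_size) -> void.  A hypothetical cached-class variant, assuming
+ * the class is never unloaded, would avoid repeating FindClass/GetMethodID
+ * on every call:
+ *
+ *     static jclass info_cls = NULL;
+ *     if (info_cls == NULL) {
+ *         jclass local = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5A_info_t");
+ *         info_cls = (jclass)ENVPTR->NewGlobalRef(ENVPAR local);
+ *     }
+ */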
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_info_by_idx
+ * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1info_1by_1idx
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jint idx_type, jint order, jlong n, jint lapl_id)
+{
+
+    char      *aName;
+    herr_t     status;
+    H5A_info_t ainfo;
+    jboolean   isCopy;
+    jclass     cls;
+    jmethodID  constructor;
+    jvalue     args[4];
+    jobject    ret_info_t = NULL;
+
+    if (obj_name == NULL) {
+        h5nullArgument( env, "H5Aget_info_by_idx: obj_name is NULL");
+        return NULL;
+    }
+
+    aName = (char*)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Aget_info_by_idx: object name not pinned");
+        return NULL;
+    }
+
+    status = H5Aget_info_by_idx((hid_t)loc_id, (const char*)aName, (H5_index_t)idx_type,
+        (H5_iter_order_t)order, (hsize_t)n, (H5A_info_t*)&ainfo, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+
+    if (status < 0) {
+       h5libraryError(env);
+       return NULL;
+    }
+
+    // get a reference to your class if you don't have it already
+    cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5A_info_t");
+    // get a reference to the constructor; the name is <init>
+    constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(ZJIJ)V");
+    args[0].z = ainfo.corder_valid;
+    args[1].j = ainfo.corder;
+    args[2].i = ainfo.cset;
+    args[3].j = ainfo.data_size;
+    ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+    return ret_info_t;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_info_by_name
+ * Signature: (ILjava/lang/String;Ljava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aget_1info_1by_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jstring attr_name, jint lapl_id)
+{
+    char      *aName;
+    char      *attrName;
+    herr_t     status;
+    H5A_info_t ainfo;
+    jboolean   isCopy;
+    jclass     cls;
+    jmethodID  constructor;
+    jvalue     args[4];
+    jobject    ret_info_t = NULL;
+
+    if (obj_name == NULL) {
+        h5nullArgument( env, "H5Aget_info_by_name: obj_name is NULL");
+        return NULL;
+    }
+    if (attr_name == NULL) {
+        h5nullArgument( env, "H5Aget_info_by_name: attr_name is NULL");
+        return NULL;
+    }
+    aName = (char*)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Aget_info_by_name: object name not pinned");
+        return NULL;
+    }
+    attrName = (char*)ENVPTR->GetStringUTFChars(ENVPAR attr_name, &isCopy);
+    if (attrName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+        h5JNIFatalError( env, "H5Aget_info_by_name: Attribute name not pinned");
+        return NULL;
+    }
+
+    status = H5Aget_info_by_name((hid_t)loc_id, (const char*)aName, (const char*)attrName,
+        (H5A_info_t*)&ainfo, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR attr_name, attrName);
+
+    if (status < 0) {
+       h5libraryError(env);
+       return NULL;
+    }
+
+    // get a reference to your class if you don't have it already
+    cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5A_info_t");
+    // get a reference to the constructor; the name is <init>
+    constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(ZJIJ)V");
+    args[0].z = ainfo.corder_valid;
+    args[1].j = ainfo.corder;
+    args[2].i = ainfo.cset;
+    args[3].j = ainfo.data_size;
+    ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+    return ret_info_t;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Adelete_by_name
+ * Signature: (ILjava/lang/String;Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Adelete_1by_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jstring attr_name, jint lapl_id)
+{
+    herr_t retVal;
+    char *aName, *attrName;
+    jboolean isCopy;
+
+    if (obj_name == NULL) {
+        h5nullArgument( env, "H5Adelete_by_name:  object name is NULL");
+        return -1;
+    }
+    if (attr_name == NULL) {
+        h5nullArgument( env, "H5Adelete_by_name:  attribute name is NULL");
+        return -1;
+    }
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Adelete_by_name: aName is not pinned");
+        return -1;
+    }
+    attrName = (char *)ENVPTR->GetStringUTFChars(ENVPAR attr_name, &isCopy);
+    if (attrName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+        h5JNIFatalError( env, "H5Adelete_by_name: attrName is not pinned");
+        return -1;
+    }
+    retVal = H5Adelete_by_name((hid_t)loc_id, (const char*)aName, (const char*)attrName, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,aName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR attr_name,attrName);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aexists
+ * Signature: (ILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Aexists
+  (JNIEnv *env, jclass clss, jint obj_id, jstring attr_name)
+{
+  char    *aName;
+  jboolean isCopy;
+  htri_t   bval = 0;
+
+  if (attr_name == NULL) {
+    h5nullArgument( env, "H5Aexists: attr_name is NULL");
+    return JNI_FALSE;
+  }
+  aName = (char*)ENVPTR->GetStringUTFChars(ENVPAR attr_name, &isCopy);
+  if (aName == NULL) {
+    h5JNIFatalError( env, "H5Aexists: attr_name not pinned");
+    return JNI_FALSE;
+  }
+
+  bval = H5Aexists((hid_t)obj_id, (const char*)aName);
+  ENVPTR->ReleaseStringUTFChars(ENVPAR attr_name, aName);
+
+  if (bval > 0) {
+    return JNI_TRUE;
+  }
+  else if (bval == 0) {
+    return JNI_FALSE;
+  }
+  else {
+    h5libraryError(env);
+    return JNI_FALSE;
+  }
+
+}
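+
+/*
+ * Note (illustrative sketch, not part of the upstream change): htri_t is
+ * HDF5's tri-state return type -- positive means true, zero means false and
+ * negative means error -- which is why the wrapper above cannot simply cast
+ * the result to jboolean.  The same mapping as a small helper:
+ *
+ *     static jboolean htri_to_jboolean(JNIEnv *env, htri_t v)
+ *     {
+ *         if (v < 0)
+ *             h5libraryError(env);    // raise the Java exception first
+ *         return (v > 0) ? JNI_TRUE : JNI_FALSE;
+ *     }
+ */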
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Adelete_by_idx
+ * Signature: (ILjava/lang/String;IIJI)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Adelete_1by_1idx
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jint idx_type, jint order, jlong n, jint lapl_id)
+{
+  char      *aName;
+  herr_t     status;
+  jboolean   isCopy;
+
+  if (obj_name == NULL) {
+    h5nullArgument( env, "H5Adelete_by_idx: obj_name is NULL");
+    return;
+  }
+
+  aName = (char*)ENVPTR->GetStringUTFChars(ENVPAR obj_name, &isCopy);
+  if (aName == NULL) {
+    h5JNIFatalError( env, "H5Adelete_by_idx: obj_name not pinned");
+    return;
+  }
+
+  status = H5Adelete_by_idx((hid_t)loc_id, (const char*)aName, (H5_index_t)idx_type,
+    (H5_iter_order_t)order, (hsize_t)n, (hid_t)lapl_id);
+  ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name, aName);
+
+  if (status < 0) {
+    h5libraryError(env);
+    return;
+  }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Aopen_by_name
+ * Signature: (ILjava/lang/String;Ljava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Aopen_1by_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring obj_name, jstring attr_name, jint aapl_id, jint lapl_id)
+
+{
+    hid_t status;
+    char *aName, *oName;
+    jboolean isCopy;
+
+    if (obj_name == NULL) {
+        h5nullArgument( env,"_H5Aopen_by_name:  obj_name is NULL");
+        return -1;
+    }
+    if (attr_name == NULL) {
+        h5nullArgument( env,"_H5Aopen_by_name:  attr_name is NULL");
+        return -1;
+    }
+
+    oName = (char *)ENVPTR->GetStringUTFChars(ENVPAR obj_name,&isCopy);
+    if (oName == NULL) {
+        h5JNIFatalError( env,"_H5Aopen_by_name: obj_name is not pinned");
+        return -1;
+    }
+    aName = (char *)ENVPTR->GetStringUTFChars(ENVPAR attr_name,&isCopy);
+    if (aName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,oName);
+        h5JNIFatalError( env,"_H5Aopen_by_name: attr_name is not pinned");
+        return -1;
+    }
+
+    status = H5Aopen_by_name((hid_t)loc_id, oName, aName, (hid_t)aapl_id, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR obj_name,oName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR attr_name,aName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5dImp.c b/source/c/hdf-java/h5dImp.c
new file mode 100755
index 0000000..39586e0
--- /dev/null
+++ b/source/c/hdf-java/h5dImp.c
@@ -0,0 +1,2075 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C interface called by Java programs to access the
+ *  Dataset Object API functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF5 entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF documentation at:
+ *    http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "hdf5.h"
+#include "h5util.h"
+#include "h5jni.h"
+#include "h5dImp.h"
+
+#ifdef __cplusplus
+  #ifdef _WINDOWS
+    #include <direct.h>
+  #endif
+#endif
+
+#ifdef _WINDOWS
+  #define CHDIR _chdir
+  #define GETCWD _getcwd
+#else
+  #define CHDIR chdir
+  #define GETCWD getcwd
+#endif
+
+#ifdef __cplusplus
+  #define CBENVPTR (cbenv)
+  #define CBENVPAR 
+  #define JVMPTR (jvm)
+  #define JVMPAR 
+  #define JVMPAR2 
+#else
+  #define CBENVPTR (*cbenv)
+  #define CBENVPAR cbenv,
+  #define JVMPTR (*jvm)
+  #define JVMPAR jvm
+  #define JVMPAR2 jvm,
+#endif
+
+herr_t H5DreadVL_str (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+herr_t H5DreadVL_notstr (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+
+#define PIN_BYTE_ARRAY() { \
+    if (isCriticalPinning) \
+        buffP = (jbyte*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+    else \
+        buffP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_BYTE_ARRAY(mode) { \
+    if (isCriticalPinning) \
+        ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+    else \
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_SHORT_ARRAY() { \
+    if (isCriticalPinning) \
+        buffP = (jshort*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+    else \
+        buffP = ENVPTR->GetShortArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_SHORT_ARRAY(mode) { \
+    if (isCriticalPinning) \
+        ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+    else \
+        ENVPTR->ReleaseShortArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_INT_ARRAY() { \
+    if (isCriticalPinning) \
+        buffP = (jint*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+    else \
+        buffP = ENVPTR->GetIntArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_INT_ARRAY(mode) { \
+    if (isCriticalPinning) \
+        ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+    else \
+        ENVPTR->ReleaseIntArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_LONG_ARRAY() { \
+    if (isCriticalPinning) \
+        buffP = (jlong*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+    else \
+        buffP = ENVPTR->GetLongArrayElements(ENVPAR buf,&isCopy); \
+}
+
+#define UNPIN_LONG_ARRAY(mode) { \
+    if (isCriticalPinning) \
+        ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+    else \
+        ENVPTR->ReleaseLongArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_FLOAT_ARRAY() { \
+    if (isCriticalPinning) \
+        buffP = (jfloat*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+    else \
+        buffP = ENVPTR->GetFloatArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_FLOAT_ARRAY(mode) { \
+    if (isCriticalPinning) \
+        ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+    else \
+        ENVPTR->ReleaseFloatArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_DOUBLE_ARRAY() { \
+    if (isCriticalPinning) \
+        buffP = (jdouble*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+    else \
+        buffP = ENVPTR->GetDoubleArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_DOUBLE_ARRAY(mode) { \
+    if (isCriticalPinning) \
+        ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+    else \
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR buf, buffP, mode); \
+}
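+
+/*
+ * Note (illustrative sketch, not part of the upstream change): the PIN/UNPIN
+ * macros above let the caller choose between Get<Type>ArrayElements, which
+ * may copy but tolerates arbitrary JNI calls while pinned, and
+ * GetPrimitiveArrayCritical, which usually avoids the copy but must be
+ * released quickly and without callbacks into the JVM.  Expanded by hand for
+ * the byte case:
+ *
+ *     jboolean isCopy;
+ *     jbyte *p = isCriticalPinning
+ *         ? (jbyte*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy)
+ *         : ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ *     // ... H5Dread/H5Dwrite using p ...
+ *     if (isCriticalPinning)
+ *         ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, p, 0);
+ *     else
+ *         ENVPTR->ReleaseByteArrayElements(ENVPAR buf, p, 0);
+ */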
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dcreate
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dcreate
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint create_plist_id)
+{
+    hid_t    status;
+    char    *file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Dcreate:  name is NULL");
+        return -1;
+    }
+    file = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Dcreate:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Dcreate2(loc_id, file, type_id, space_id, (hid_t)H5P_DEFAULT, (hid_t)create_plist_id, (hid_t)H5P_DEFAULT);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, file);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dchdir_ext
+ * Signature: (Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dchdir_1ext
+  (JNIEnv *env, jclass clss, jstring dir_name)
+{
+    hid_t    status;
+    char    *file;
+    jboolean isCopy;
+
+    if (dir_name == NULL) {
+        h5nullArgument( env, "H5Dchdir_ext:  dir_name is NULL");
+        return -1;
+    }
+    file = (char*)ENVPTR->GetStringUTFChars(ENVPAR dir_name, &isCopy);
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Dchdir_ext:  file dir not pinned");
+        return -1;
+    }
+
+    status = CHDIR( file );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR dir_name, file);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dgetdir_ext
+ * Signature: ([Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dgetdir_1ext
+  (JNIEnv *env, jclass clss, jobjectArray dir_name, jint buf_size)
+{
+    char   *aName;
+    jstring str;
+
+    if (buf_size <= 0) {
+        h5badArgument( env, "H5Dgetcwd:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char) * buf_size);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Dgetcwd:  malloc failed");
+        return -1;
+    }
+    GETCWD( (char*)aName, (size_t)buf_size);
+
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+
+    free(aName);
+
+    if (str == NULL) {
+         h5JNIFatalError( env,"H5Dgetcwd:  return string failed");
+         return -1;
+    }
+
+    ENVPTR->SetObjectArrayElement(ENVPAR dir_name, 0, str);
+
+    return 0;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dopen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dopen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    hid_t    status;
+    char    *file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Dopen:  name is NULL");
+        return -1;
+    }
+
+    file = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Dopen:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Dopen2((hid_t)loc_id, (const char*)file, (hid_t)H5P_DEFAULT);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, file);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_space
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1space
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Dget_space((hid_t)dataset_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1type
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Dget_type((hid_t)dataset_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1create_1plist
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Dget_create_plist((hid_t)dataset_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+htri_t H5Tdetect_variable_str(hid_t tid) {
+    htri_t ret_val = 0;
+
+    if (H5Tget_class(tid) == H5T_COMPOUND) {
+        hid_t mtid = H5Tget_member_type(tid, 0);
+        ret_val = H5Tdetect_variable_str(mtid);
+        H5Tclose (mtid);
+    }
+    else
+        ret_val = H5Tis_variable_str(tid);
+
+    return ret_val;
+}
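+
+/*
+ * Note (illustrative sketch, not part of the upstream change): unlike
+ * H5Tdetect_class, the helper above only recurses into member 0 of a
+ * compound type, so a variable-length string in a later member would go
+ * undetected.  A fuller check, assuming every member matters, would walk
+ * all members:
+ *
+ *     int i, n = H5Tget_nmembers(tid);
+ *     for (i = 0; i < n; i++) {
+ *         hid_t mtid = H5Tget_member_type(tid, i);
+ *         htri_t found = H5Tdetect_variable_str(mtid);
+ *         H5Tclose(mtid);
+ *         if (found > 0)
+ *             return 1;
+ *     }
+ */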
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread
+ * Signature: (IIIII[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jbyte   *buffP;
+    jboolean isCopy;
+    htri_t data_class;
+
+    /* recursive detect any vlen data values in type (compound, array ...) */
+    data_class = H5Tdetect_class(mem_type_id, H5T_VLEN);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread:  buf does not support variable length type");
+        return -1;
+    }
+    /* recursive detect any vlen string in type (compound, array ...) */
+    data_class = H5Tdetect_variable_str(mem_type_id);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread:  buf does not support variable length string type");
+        return -1;
+    }
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+
+    PIN_BYTE_ARRAY();
+
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+        UNPIN_BYTE_ARRAY(JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    UNPIN_BYTE_ARRAY(0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite
+ * Signature: (IIIII[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jbyte   *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+
+    PIN_BYTE_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    UNPIN_BYTE_ARRAY(JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
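+
+/*
+ * Note (illustrative sketch, not part of the upstream change): the mode
+ * passed to the UNPIN macros follows JNI release semantics -- mode 0 copies
+ * the native buffer back into the Java array (required after a successful
+ * H5Dread), while JNI_ABORT discards the native copy without write-back
+ * (sufficient after H5Dwrite, or on any error path).  Hence the pairing
+ * seen in the two wrappers above:
+ *
+ *     UNPIN_BYTE_ARRAY(0);            // read path: publish data to Java
+ *     UNPIN_BYTE_ARRAY(JNI_ABORT);    // write or error path: just unpin
+ */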
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dextend
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dextend
+  (JNIEnv *env, jclass clss, jint dataset_id, jbyteArray size)
+{
+    herr_t   status;
+    int      i;
+    int      rank;
+    hsize_t *sa;
+    hsize_t *lp;
+    jbyte   *P;
+    jboolean isCopy;
+    jlong   *jlp;
+
+    if (size == NULL) {
+        h5nullArgument( env, "H5Dextend:  array of sizes is NULL");
+        return -1;
+    }
+    /*
+     *  Future:  check that the array has the correct
+     *           rank (the same as that of dataset_id)
+     */
+    P = ENVPTR->GetByteArrayElements(ENVPAR size, &isCopy);
+    if (P == NULL) {
+        h5JNIFatalError( env, "H5Dextend:  array not pinned");
+        return -1;
+    }
+    i = ENVPTR->GetArrayLength(ENVPAR size);
+    rank = i / sizeof(jlong);
+    sa = lp = (hsize_t*)malloc(rank * sizeof(hsize_t));
+    if (sa == NULL)  {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR size, P, JNI_ABORT);
+        h5JNIFatalError(env,  "H5Dextend:  size not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong*)P;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t) * jlp;
+        lp++;
+        jlp++;
+    }
+
+    status = H5Dextend((hid_t)dataset_id, (hsize_t*)sa);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR size, P, JNI_ABORT);
+    free(sa);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
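+
+/*
+ * Note (illustrative sketch, not part of the upstream change): the pointer
+ * loop above performs the jlong-to-hsize_t widening used throughout these
+ * wrappers, since Java passes the sizes as a packed byte array of 8-byte
+ * longs.  The same conversion written with indices:
+ *
+ *     jlong *vals = (jlong*)P;
+ *     for (i = 0; i < rank; i++)
+ *         sa[i] = (hsize_t)vals[i];
+ */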
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dclose
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = 0;
+
+    retVal = H5Dclose((hid_t)dataset_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_storage_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1storage_1size
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hsize_t retVal = (hsize_t)-1;
+    if (dataset_id < 0) {
+        h5badArgument(env, "H5Dget_storage_size: not a dataset");
+    }
+    retVal = H5Dget_storage_size((hid_t)dataset_id );
+    return (jlong)retVal;
+}
+
+/*
+ * Copies the content of one dataset to another dataset
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dcopy
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dcopy
+  (JNIEnv *env, jclass clss, jint src_id, jint dst_id)
+{
+    jbyte  *buf;
+    herr_t  retVal = -1;
+    hid_t   src_did = (hid_t)src_id;
+    hid_t   dst_did = (hid_t)dst_id;
+    hid_t   tid = -1;
+    hid_t   sid = -1;
+    hsize_t total_size = 0, total_allocated_size;
+
+    total_allocated_size = H5Dget_storage_size(src_did);
+    if (total_allocated_size == 0)  /* hsize_t is unsigned: 0 means no storage allocated */
+        return 0; /* nothing to write */
+
+    sid = H5Dget_space(src_did);
+    if (sid < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    tid = H5Dget_type(src_did);
+    if (tid < 0) {
+        H5Sclose(sid);
+        h5libraryError(env);
+        return -1;
+    }
+
+    total_size = H5Sget_simple_extent_npoints(sid) * H5Tget_size(tid);
+
+    H5Sclose(sid);
+
+    buf = (jbyte*)malloc((size_t)total_size * sizeof(jbyte)); /* size_t, not int: avoid truncation for large datasets */
+    if (buf == NULL) {
+        H5Tclose(tid);
+        h5outOfMemory(env, "H5Dcopy:  malloc failed");
+        return -1;
+    }
+
+    retVal = H5Dread((hid_t)src_did, (hid_t)tid, (hid_t)H5S_ALL, (hid_t)H5S_ALL, (hid_t)H5P_DEFAULT, buf);
+    H5Tclose(tid);
+
+    if (retVal < 0) {
+        free(buf);
+        h5libraryError(env);
+        return (jint)retVal;
+    }
+
+    tid = H5Dget_type(dst_did);
+    if (tid < 0) {
+        free(buf);
+        h5libraryError(env);
+        return -1;
+    }
+    retVal = H5Dwrite((hid_t)dst_did, (hid_t)tid, (hid_t)H5S_ALL, (hid_t)H5S_ALL, (hid_t)H5P_DEFAULT, buf);
+    H5Tclose(tid);
+    free(buf);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Copies the content of one dataset to another dataset
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_get_buf_size
+ * Signature: (III[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size
+  (JNIEnv *env, jclass clss, jint dataset_id, jint type_id, jint space_id,
+          jintArray size)
+{
+    jint    *P;
+    jboolean isCopy;
+    herr_t   status;
+    hsize_t  sz;
+
+    if ( size == NULL ) {
+        h5nullArgument(env, "H5Dvlen_get_buf_size:  size is NULL");
+        return -1;
+    }
+
+    P = ENVPTR->GetIntArrayElements(ENVPAR size, &isCopy);
+    if (P == NULL) {
+        h5JNIFatalError(env, "H5Dvlen_get_buf_size:  array not pinned");
+        return -1;
+    }
+
+    status = (jint)H5Dvlen_get_buf_size((hid_t)dataset_id, (hid_t)type_id, (hid_t)space_id, (hsize_t*)&sz);
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR size, P, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    P[0] = (jint)sz;
+    ENVPTR->ReleaseIntArrayElements(ENVPAR size, P, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_reclaim
+ * Signature: (III[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dvlen_1reclaim
+  (JNIEnv *env, jclass clss, jint type_id, jint space_id,
+   jint xfer_plist_id, jbyteArray buf)
+{
+    herr_t   status;
+    jbyte   *byteP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dvlen_reclaim:  buf is NULL");
+        return -1;
+    }
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Dvlen_reclaim:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dvlen_reclaim((hid_t)type_id,
+        (hid_t)space_id, (hid_t)xfer_plist_id, byteP);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR buf, byteP, JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/***************************************************************
+ *                   New APIs for HDF5.1.6                     *
+ ***************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_space_status
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1space_1status
+  (JNIEnv *env, jclass clss, jint dset_id, jintArray status)
+{
+    jint    *theArray;
+    jboolean isCopy;
+    H5D_space_status_t space_status;
+    herr_t   retVal = -1;
+
+    if (status == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Dget_space_status:  status is NULL");
+        return -1;
+    }
+    theArray = (jint*)ENVPTR->GetIntArrayElements(ENVPAR status, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Dget_space_status:  status not pinned");
+        return -1;
+    }
+
+    retVal = H5Dget_space_status((hid_t)dset_id, (H5D_space_status_t*)&space_status );
+
+    if (retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR status, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    theArray[0] = space_status;
+    ENVPTR->ReleaseIntArrayElements(ENVPAR status, theArray, 0);
+
+    return (jint)retVal;
+}
+
+
+/*
+    ////////////////////////////////////////////////////////////////////
+    //                                                                //
+    //         New APIs for reading data from the library             //
+    //  Using H5Dread(..., Object buf) requires function calls        //
+    //  theArray.emptyBytes() and theArray.arrayify( buf), which      //
+    //  triples the actual memory needed by the data set.             //
+    //  Using the following APIs solves the problem.                  //
+    //                                                                //
+    ////////////////////////////////////////////////////////////////////
+*/
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_short
+ * Signature: (IIIII[S[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1short
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jshortArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jshort  *buffP;
+    jboolean isCopy;
+    htri_t data_class;
+
+    /* recursive detect any vlen data values in type (compound, array ...) */
+    data_class = H5Tdetect_class(mem_type_id, H5T_VLEN);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_short:  buf does not support variable length type");
+        return -1;
+    }
+    /* recursive detect any vlen string in type (compound, array ...) */
+    data_class = H5Tdetect_variable_str(mem_type_id);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_short:  buf does not support variable length string type");
+        return -1;
+    }
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dread_short:  buf is NULL");
+        return -1;
+    }
+
+    PIN_SHORT_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dread_short:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+        UNPIN_SHORT_ARRAY(JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    UNPIN_SHORT_ARRAY(0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_int
+ * Signature: (IIIII[I[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1int
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jintArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jint    *buffP;
+    jboolean isCopy;
+    htri_t data_class;
+
+    /* recursive detect any vlen data values in type (compound, array ...) */
+    data_class = H5Tdetect_class(mem_type_id, H5T_VLEN);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_int:  buf does not support variable length type");
+        return -1;
+    }
+    /* recursive detect any vlen string in type (compound, array ...) */
+    data_class = H5Tdetect_variable_str(mem_type_id);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_int:  buf does not support variable length string type");
+        return -1;
+    }
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dread_int:  buf is NULL");
+        return -1;
+    }
+
+    PIN_INT_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dread_int:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+        UNPIN_INT_ARRAY(JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    UNPIN_INT_ARRAY(0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_long
+ * Signature: (IIIII[J[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1long
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jlongArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jlong   *buffP;
+    jboolean isCopy;
+    htri_t data_class;
+
+    /* recursive detect any vlen data values in type (compound, array ...) */
+    data_class = H5Tdetect_class(mem_type_id, H5T_VLEN);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_long:  buf does not support variable length type");
+        return -1;
+    }
+    /* recursive detect any vlen string in type (compound, array ...) */
+    data_class = H5Tdetect_variable_str(mem_type_id);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_long:  buf does not support variable length string type");
+        return -1;
+    }
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dread_long:  buf is NULL");
+        return -1;
+    }
+
+    PIN_LONG_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dread_long:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+        UNPIN_LONG_ARRAY(JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    UNPIN_LONG_ARRAY(0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_float
+ * Signature: (IIIII[F[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1float
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jfloatArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jfloat  *buffP;
+    jboolean isCopy;
+    htri_t data_class;
+
+    /* recursive detect any vlen data values in type (compound, array ...) */
+    data_class = H5Tdetect_class(mem_type_id, H5T_VLEN);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_float:  buf does not support variable length type");
+        return -1;
+    }
+    /* recursive detect any vlen string in type (compound, array ...) */
+    data_class = H5Tdetect_variable_str(mem_type_id);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_float:  buf does not support variable length string type");
+        return -1;
+    }
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dread_float:  buf is NULL");
+        return -1;
+    }
+
+    PIN_FLOAT_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dread_float:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+        UNPIN_FLOAT_ARRAY(JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    UNPIN_FLOAT_ARRAY(0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_double
+ * Signature: (IIIII[D[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1double
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jdoubleArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jdouble *buffP;
+    jboolean isCopy;
+    htri_t data_class;
+
+    /* recursive detect any vlen data values in type (compound, array ...) */
+    data_class = H5Tdetect_class(mem_type_id, H5T_VLEN);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_double:  buf does not support variable length type");
+        return -1;
+    }
+    /* recursive detect any vlen string in type (compound, array ...) */
+    data_class = H5Tdetect_variable_str(mem_type_id);
+    if((data_class == 1) || (data_class < 0)) {
+        h5badArgument( env, "H5Dread_double:  buf does not support variable length string type");
+        return -1;
+    }
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dread_double:  buf is NULL");
+        return -1;
+    }
+
+    PIN_DOUBLE_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dread_double:  buf not pinned");
+        return -1;
+    }
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+        UNPIN_DOUBLE_ARRAY(JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    UNPIN_DOUBLE_ARRAY(0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_string
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1string
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jobjectArray j_buf)
+{
+    herr_t  status;
+    char   *c_buf;
+    char   *cstr;
+    size_t  str_len;
+    size_t  i;
+    size_t  n;
+    size_t  pos;
+    jstring jstr;
+
+    c_buf = cstr = NULL;
+    if (j_buf == NULL) {
+        h5nullArgument(env, "H5Dread_string:  buf is NULL");
+        return -1;
+    }
+
+    n = ENVPTR->GetArrayLength(ENVPAR j_buf);
+    if (n <= 0) {
+        h5nullArgument(env, "H5Dread_string:  buf length <=0");
+        return -1;
+    }
+
+    if ((str_len = H5Tget_size((hid_t)mem_type_id)) == 0) { /* size_t is unsigned: 0 means H5Tget_size failed */
+        h5libraryError(env);
+        return -1;
+    }
+
+    if ((cstr = (char*)malloc(str_len + 1)) == NULL) {
+        h5JNIFatalError(env, "H5Dread_string: memory allocation failed.");
+        return -1;
+    }
+
+    if ((c_buf = (char*)malloc(n * str_len)) == NULL) {
+        if (cstr)
+            free (cstr);
+        cstr = NULL;
+        h5JNIFatalError(env, "H5Dread_string: memory allocation failed.");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                     (hid_t)file_space_id, (hid_t)xfer_plist_id, c_buf);
+
+    if (status < 0) {
+        if (cstr)
+            free (cstr);
+        cstr = NULL;
+        if (c_buf)
+            free (c_buf);
+        c_buf = NULL;
+        h5libraryError(env);
+        return -1;
+    }
+
+    pos = 0;
+    for (i=0; i<n; i++) {
+        memcpy(cstr, c_buf+pos, str_len);
+        cstr[str_len] = '\0';
+        jstr = ENVPTR->NewStringUTF(ENVPAR cstr);
+        ENVPTR->SetObjectArrayElement(ENVPAR j_buf, i, jstr);
+        pos += str_len;
+    }
+
+    if (c_buf)
+        free(c_buf);
+
+    if (cstr)
+        free (cstr);
+
+    return (jint)status;
+}
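+
+/*
+ * Note (illustrative sketch, not part of the upstream change): fixed-length
+ * HDF5 strings arrive as one packed buffer of n * str_len bytes with no
+ * terminators, which is why the loop above copies each slot into a scratch
+ * buffer and appends '\0' before calling NewStringUTF.  Layout for n = 2
+ * and str_len = 4:
+ *
+ *     c_buf: | 'a' 'b' 'c' 'd' | 'e' 'f' ' ' ' ' |
+ *              element 0         element 1
+ */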
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_short
+ * Signature: (IIIII[S[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1short
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jshortArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jshort  *buffP;
+    jboolean isCopy;
+
+    if (buf == NULL ) {
+        h5nullArgument(env, "H5Dwrite_short:  buf is NULL");
+        return -1;
+    }
+
+    PIN_SHORT_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dwrite_short:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    UNPIN_SHORT_ARRAY(JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_int
+ * Signature: (IIIII[I[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1int
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jintArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jint    *buffP;
+    jboolean isCopy;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dwrite_int:  buf is NULL");
+        return -1;
+    }
+
+    PIN_INT_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dwrite_int:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    UNPIN_INT_ARRAY(JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_long
+ * Signature: (IIIII[J[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1long
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jlongArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jlong   *buffP;
+    jboolean isCopy;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dwrite_long:  buf is NULL");
+        return -1;
+    }
+
+    PIN_LONG_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dwrite_long:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    UNPIN_LONG_ARRAY(JNI_ABORT);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_float
+ * Signature: (IIIII[F[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1float
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jfloatArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jfloat  *buffP;
+    jboolean isCopy;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dwrite_float:  buf is NULL");
+        return -1;
+    }
+
+    PIN_FLOAT_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dwrite_float:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    UNPIN_FLOAT_ARRAY(JNI_ABORT);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_double
+ * Signature: (IIIII[D[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1double
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jdoubleArray buf, jboolean isCriticalPinning)
+{
+    herr_t   status;
+    jdouble *buffP;
+    jboolean isCopy;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dwrite_double:  buf is NULL");
+        return -1;
+    }
+
+    PIN_DOUBLE_ARRAY();
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Dwrite_double:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    UNPIN_DOUBLE_ARRAY(JNI_ABORT);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+// Rosetta Biosoftware
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5DwriteString
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5DwriteString
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jobjectArray buf)
+{
+    herr_t  status;
+    char  **wdata;
+    jsize   size;
+    jint    i;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5DwriteString:  buf is NULL");
+        return -1;
+    }
+
+    size = ENVPTR->GetArrayLength(ENVPAR (jarray) buf);
+    wdata = (char**)malloc(size * sizeof (char*));
+
+    if (!wdata) {
+        h5JNIFatalError(env, "H5DwriteString:  cannot allocate buffer");
+        return -1;
+    }
+
+    memset(wdata, 0, size * sizeof(char*));
+    for (i = 0; i < size; ++i) {
+        jstring obj = (jstring) ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray) buf, i);
+        if (obj != 0) {
+            jsize length = ENVPTR->GetStringUTFLength(ENVPAR obj);
+            const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+            if (utf8) {
+                wdata[i] = (char*)malloc(length + 1);
+                if (wdata[i]) {
+                  memset(wdata[i], 0, (length + 1));
+                  strncpy(wdata[i], utf8, length);
+                }
+           }
+
+           ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+           ENVPTR->DeleteLocalRef(ENVPAR obj);
+        }
+    } /*for (i = 0; i < size; ++i) */
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, wdata);
+
+    // now free memory
+    for (i = 0; i < size; i++) {
+       if(wdata[i]) {
+           free(wdata[i]);
+       }
+    }
+    free(wdata);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
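+
+/*
+ * Note (illustrative sketch, not part of the upstream change): for a
+ * variable-length string datatype (H5T_C_S1 with size H5T_VARIABLE),
+ * H5Dwrite expects a char*[] in which each element is an independently
+ * allocated, NUL-terminated string -- exactly the wdata array built above.
+ * Minimal shape of such a write, assuming vls_tid is a variable-length
+ * string datatype handle and did an open dataset:
+ *
+ *     char *wdata[2] = { strdup("alpha"), strdup("beta") };
+ *     H5Dwrite(did, vls_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ *     free(wdata[0]); free(wdata[1]);
+ */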
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5DwriteNotString
+ * Signature: (IIIII[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5DwriteNotString
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning)
+{
+    herr_t  status;
+    hvl_t  *wdata;
+    size_t  size;
+    jsize   n;
+    jbyte   *buffP;
+    jboolean isCopy;
+    jint    i;
+    jint    j;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5DwriteNotString:  buf is NULL");
+        return -1;
+    }
+
+    PIN_BYTE_ARRAY();
+
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5DwriteNotString:  buf not pinned");
+        return -1;
+    }
+
+    /* rebuild VL structure */
+    n = ENVPTR->GetArrayLength(ENVPAR (jarray) buf);
+    wdata = (hvl_t*)calloc(n, sizeof(hvl_t));
+
+    if (!wdata) {
+        UNPIN_BYTE_ARRAY(JNI_ABORT);
+        h5JNIFatalError(env, "H5DwriteNotString:  cannot allocate buffer");
+        return -1;
+    }
+
+    size = H5Tget_size(mem_type_id);
+    memset(wdata, 0, n * sizeof(hvl_t));
+    /* Allocating and initializing the VL data to write was never implemented:
+     * the conversion loop that belongs here was left disabled upstream (it was
+     * not valid C, e.g. it switched on mem_type_id with a "case float:" label),
+     * so the zero-initialized wdata is what actually gets written below. */
+
+    UNPIN_BYTE_ARRAY(0);
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+                      (hid_t)file_space_id, (hid_t)xfer_plist_id, wdata);
+
+    // now free memory
+    for (i = 0; i < n; i++) {
+       if(wdata[i].p) {
+           free(wdata[i].p);
+       }
+    }
+    free(wdata);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/**
+ *  Read VLEN data into an array of arrays.
+ *  Object[] buf contains VL arrays of data points.
+ *  Currently this only handles variable-length arrays of atomic data types.
+ */
+/* old version */
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5DreadVL
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5DreadVL
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jobjectArray buf)
+{
+    htri_t isStr=0, isComplex=0, isVlenStr=0;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5DreadVL:  buf is NULL");
+        return -1;
+    }
+
+    /* fixed bug 951
+    if (H5Tget_class((hid_t)mem_type_id) == H5T_COMPOUND) {
+        hid_t nested_tid = H5Tget_member_type((hid_t)mem_type_id, 0);
+        isStr = H5Tis_variable_str(nested_tid);
+        H5Tclose(nested_tid);
+    }
+    else
+        isStr = H5Tis_variable_str((hid_t)mem_type_id);
+    */
+
+    /* fixed bug 2105, the following line does not detect array of vlen strings
+    isStr = H5Tdetect_variable_str((hid_t)mem_type_id);
+    */
+
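+    /* H5Tdetect_class searches the entire datatype, including members of
+       compound and array types, so this also catches arrays of vlen strings. */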
+    isStr = H5Tdetect_class((hid_t)mem_type_id, H5T_STRING);
+
+
+    /* fixed compound of vlen of compound */
+    if (H5Tget_class((hid_t)mem_type_id) == H5T_COMPOUND) {
+        hid_t nested_tid = H5Tget_member_type((hid_t)mem_type_id, 0);
+        isComplex = H5Tdetect_class((hid_t)nested_tid, H5T_COMPOUND) ||
+                    H5Tdetect_class((hid_t)nested_tid, H5T_VLEN);
+        H5Tclose(nested_tid);
+    }
+    else if (H5Tget_class((hid_t)mem_type_id) == H5T_VLEN) {
+      isVlenStr = 1; /* strings created by H5Tvlen_create( H5T_C_S1) */
+    }
+
+    if (isStr == 0 || isComplex>0 || isVlenStr) {
+        return (jint) H5DreadVL_notstr (env, (hid_t)dataset_id, (hid_t)mem_type_id,
+                                     (hid_t)mem_space_id, (hid_t)file_space_id,
+                                     (hid_t)xfer_plist_id, buf);
+    }
+
+    if (isStr > 0) {
+        return (jint) H5DreadVL_str (env, (hid_t)dataset_id, (hid_t)mem_type_id,
+                                     (hid_t)mem_space_id, (hid_t)file_space_id,
+                                     (hid_t)xfer_plist_id, buf);
+    }
+
+
+    return -1;
+}
+
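+/*
+ * For reference only: a minimal, hedged sketch (hypothetical helper, not part
+ * of the binding and not called anywhere) of the plain C read/reclaim pattern
+ * that H5DreadVL_notstr below wraps. It assumes did/tid/sid are valid ids for
+ * a VL dataset of n elements, e.g. a type created with
+ * H5Tvlen_create(H5T_NATIVE_INT).
+ */
+#if 0
+static herr_t example_read_vl(hid_t did, hid_t tid, hid_t sid, int n)
+{
+    hvl_t *rdata = (hvl_t*)calloc(n, sizeof(hvl_t));
+    herr_t status;
+
+    if (rdata == NULL)
+        return -1;
+    /* the library allocates rdata[i].p and sets rdata[i].len for each element */
+    status = H5Dread(did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+    if (status >= 0) {
+        /* ... use rdata ... then hand the VL buffers back to the library */
+        H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+    }
+    free(rdata);
+    return status;
+}
+#endif
+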
+herr_t H5DreadVL_notstr (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid,
+    hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf)
+{
+    jint    i;
+    jint    n;
+    jstring jstr;
+    herr_t  status;
+    h5str_t h5str;
+    hvl_t  *rdata;
+    size_t  size;
+    size_t  max_len = 0;
+
+
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+
+    rdata = (hvl_t*)calloc(n, sizeof(hvl_t));
+    if (rdata == NULL) {
+        h5JNIFatalError(env, "H5DreadVL_notstr:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, rdata);
+
+    if (status < 0) {
+        H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, rdata);
+        free(rdata);
+        h5JNIFatalError(env, "H5DreadVL_notstr: failed to read data");
+        return -1;
+    }
+
+    max_len = 1;
+    for (i=0; i<n; i++) {
+        if ((rdata + i)->len > max_len)
+            max_len = (rdata + i)->len;
+    }
+
+    size = H5Tget_size(tid) * max_len;
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new(&h5str, 4 * size);
+
+    if (h5str.s == NULL) {
+        H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, rdata);
+        free(rdata);
+        h5JNIFatalError(env, "H5DreadVL_notstr:  failed to allocate strng buf");
+        return -1;
+    }
+
+    for (i=0; i<n; i++) {
+        h5str.s[0] = '\0';
+        h5str_sprintf(&h5str, did, tid, rdata+i, 0);
+        jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+    }
+    h5str_free(&h5str);
+
+    H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, rdata);
+    free(rdata);
+
+    return status;
+}
+
+herr_t H5DreadVL_str (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t
+    file_sid, hid_t xfer_plist_id, jobjectArray buf)
+{
+    char  **strs;
+    jstring jstr;
+    jint    i;
+    jint    n;
+    herr_t  status = -1;
+
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+    strs = (char**)calloc(n, sizeof(char*));
+
+    if (strs == NULL) {
+        h5JNIFatalError(env, "H5DreadVL_str:  failed to allocate buffer for reading variable length strings");
+        return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, strs);
+
+    if (status < 0) {
+        H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, strs);
+        free(strs);
+        h5JNIFatalError(env, "H5DreadVL_str: failed to read variable length strings");
+        return -1;
+    }
+
+    for (i=0; i<n; i++) {
+        jstr = ENVPTR->NewStringUTF(ENVPAR strs[i]);
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+        free (strs[i]);
+    }
+
+    /*
+    When repeatedly reading a dataset with a large number of strings (e.g., 1,000,000),
+    H5Dvlen_reclaim() may crash on Windows because the Java GC cannot reclaim
+    memory quickly enough. Instead, each string is freed individually with
+    free(strs[i]) in the loop above.
+    H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, strs);
+    */
+
+    free(strs);
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_reg_ref
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1reg_1ref (JNIEnv *env, jclass clss,
+    jint dataset_id, jint mem_type_id, jint mem_space_id,
+    jint file_space_id, jint xfer_plist_id, jobjectArray buf)
+{
+    herr_t    status;
+    h5str_t   h5str;
+    size_t    size;
+    hdset_reg_ref_t *ref_data;
+    jint      i;
+    jint      n;
+    jstring   jstr;
+
+    hid_t region = -1;
+    hid_t did = (hid_t) dataset_id;
+    hid_t tid = (hid_t) mem_type_id;
+    hid_t mem_sid = (hid_t) mem_space_id;
+    hid_t file_sid = (hid_t) file_space_id;
+
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+    size = sizeof(hdset_reg_ref_t); /*H5Tget_size(tid);*/
+    ref_data = (hdset_reg_ref_t*)malloc(size * n);
+
+    if (ref_data == NULL) {
+        h5JNIFatalError(env, "H5Dread_reg_ref:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, ref_data);
+
+    if (status < 0) {
+        free(ref_data);
+        h5JNIFatalError(env, "H5Dread_reg_ref: failed to read data");
+        return -1;
+    }
+
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new(&h5str, 1024);
+    for (i=0; i<n; i++) {
+        h5str.s[0] = '\0';
+        h5str_sprintf(&h5str, did, tid, ref_data[i], 0);
+        jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+    }
+
+    h5str_free(&h5str);
+    free(ref_data);
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_reg_ref_data
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1reg_1ref_1data (JNIEnv *env, jclass clss,
+    jint dataset_id, jint mem_type_id, jint mem_space_id,
+    jint file_space_id, jint xfer_plist_id, jobjectArray buf)
+{
+    herr_t    status;
+    h5str_t   h5str;
+    size_t    size;
+    hdset_reg_ref_t *ref_data;
+    jint      i;
+    jint      n;
+    jstring   jstr;
+
+    hid_t        region_obj;
+    H5S_sel_type region_type;
+
+    hid_t region = -1;
+    hid_t did = (hid_t) dataset_id;
+    hid_t tid = (hid_t) mem_type_id;
+    hid_t mem_sid = (hid_t) mem_space_id;
+    hid_t file_sid = (hid_t) file_space_id;
+
+    n = ENVPTR->GetArrayLength(ENVPAR buf);
+    size = sizeof(hdset_reg_ref_t); /*H5Tget_size(tid);*/
+    ref_data = (hdset_reg_ref_t*)malloc(size * n);
+
+    if (ref_data == NULL) {
+        h5JNIFatalError(env, "H5Dread_reg_ref_data:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, ref_data);
+
+    if (status < 0) {
+        free(ref_data);
+        h5JNIFatalError(env, "H5Dread_reg_ref_data: failed to read data");
+        return -1;
+    }
+
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new(&h5str, 1024);
+    for (i=0; i<n; i++) {
+        h5str.s[0] = '\0';
+
+        /* dereference the region reference and dump the data in the selected region */
+        region_obj = H5Rdereference(did, H5R_DATASET_REGION, ref_data[i]);
+        if (region_obj >= 0) {
+            region = H5Rget_region(did, H5R_DATASET_REGION, ref_data[i]);
+            if (region >= 0) {
+                region_type = H5Sget_select_type(region);
+                if (region_type == H5S_SEL_POINTS) {
+                    h5str_dump_region_points_data(&h5str, region, region_obj);
+                }
+                else {
+                    h5str_dump_region_blocks_data(&h5str, region, region_obj);
+                }
+
+                H5Sclose(region);
+            }
+            H5Dclose(region_obj);
+        }
+        jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+
+        ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+    }
+
+    h5str_free(&h5str);
+    free(ref_data);
+
+    return status;
+}
+
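+/*
+ * For reference only: a minimal, hedged sketch (hypothetical helper, not part
+ * of the binding and not called anywhere) of how the dataset-region references
+ * consumed by the two readers above are typically produced: select a region in
+ * the source dataspace, then encode the selection with H5Rcreate. loc_id is
+ * assumed to be an open file or group containing a 1-D dataset named "dset"
+ * whose dataspace is space_id.
+ */
+#if 0
+static herr_t example_make_region_ref(hid_t loc_id, hid_t space_id,
+                                      hdset_reg_ref_t *ref)
+{
+    hsize_t start[1] = {0};
+    hsize_t count[1] = {4};
+
+    /* select the first four elements, then encode that selection */
+    if (H5Sselect_hyperslab(space_id, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
+        return -1;
+    return H5Rcreate(ref, loc_id, "dset", H5R_DATASET_REGION, space_id);
+}
+#endif
+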
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dcreate2
+ * Signature: (ILjava/lang/String;IIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dcreate2
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint link_plist_id, jint create_plist_id, jint access_plist_id)
+{
+    hid_t    status;
+    char    *file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument(env, "_H5Dcreate2:  name is NULL");
+        return -1;
+    }
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (file == NULL) {
+        h5JNIFatalError(env, "_H5Dcreate2:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Dcreate2((hid_t)loc_id, (const char*)file, (hid_t)type_id, (hid_t)space_id, (hid_t)link_plist_id, (hid_t)create_plist_id, (hid_t)access_plist_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, file);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dopen2
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dopen2
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist)
+{
+    hid_t    status;
+    char    *file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument(env, "_H5Dopen2:  name is NULL");
+        return -1;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (file == NULL) {
+        h5JNIFatalError(env, "_H5Dopen2:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Dopen2((hid_t)loc_id, file, (hid_t)access_plist );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, file);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dcreate_anon
+ * Signature: (IIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dcreate_1anon
+  (JNIEnv *env, jclass clss, jint loc_id, jint type_id, jint space_id, jint dcpl_id, jint dapl_id)
+{
+    hid_t status;
+
+    status = H5Dcreate_anon((hid_t)loc_id, (hid_t)type_id, (hid_t)space_id, (hid_t)dcpl_id, (hid_t)dapl_id);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_space_status
+ * Signature: (I)I;
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1space_1status
+  (JNIEnv *env, jclass clss, jint loc_id)
+{
+    herr_t             status;
+    H5D_space_status_t space_status;
+
+    status = H5Dget_space_status((hid_t)loc_id, (H5D_space_status_t*)&space_status);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)space_status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_access_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1access_1plist
+  (JNIEnv *env, jclass clss, jint loc_id)
+{
+    hid_t status;
+
+    status = H5Dget_access_plist((hid_t)loc_id);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_offset
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1offset
+  (JNIEnv *env, jclass clss, jint loc_id)
+{
+    haddr_t offset;
+
+    offset = H5Dget_offset((hid_t)loc_id);
+    if (offset == HADDR_UNDEF) {
+        h5libraryError(env);
+    }
+    return (jlong)offset;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_get_buf_size_long
+ * Signature: (III)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size_1long
+  (JNIEnv *env, jclass clss, jint dataset_id, jint type_id, jint space_id)
+{
+    herr_t  status;
+    hsize_t sz;
+
+    status = H5Dvlen_get_buf_size((hid_t)dataset_id, (hid_t)type_id,
+                                  (hid_t)space_id, &sz);
+
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jlong)sz;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dfill
+ * Signature: ([BI[BII)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dfill
+  (JNIEnv *env, jclass clss, jbyteArray fill, jint fill_type_id, jbyteArray buf, jint buf_type_id, jint space_id)
+{
+    herr_t    status;
+    jbyte    *fillP;
+    jbyte    *buffP;
+    jboolean  isCopy1;
+    jboolean  isCopy2;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dfill:  buf is NULL");
+        return;
+    }
+
+    if (fill) {
+        fillP = ENVPTR->GetByteArrayElements(ENVPAR fill, &isCopy1);
+        if (fillP == NULL) {
+            h5JNIFatalError( env, "H5Dfill:  fill not pinned");
+            return;
+        }
+    }
+    else
+        fillP = NULL;
+
+    buffP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy2);
+    if (buffP == NULL) {
+        /* release the fill buffer before bailing out */
+        if (fillP) {
+            ENVPTR->ReleaseByteArrayElements(ENVPAR fill, fillP, JNI_ABORT);
+        }
+        h5JNIFatalError(env, "H5Dfill:  buf not pinned");
+        return;
+    }
+    
+    status = H5Dfill((const void*)fillP, (hid_t)fill_type_id, (void*)buffP, (hid_t)buf_type_id, (hid_t)space_id);
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+        if(fillP) {
+            ENVPTR->ReleaseByteArrayElements(ENVPAR fill, fillP, JNI_ABORT);
+        }
+        h5libraryError(env);
+        return;
+    }
+    
+    if (isCopy2 == JNI_TRUE) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, 0);
+    }
+    if(fillP) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR fill, fillP, JNI_ABORT);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dset_extent
+ * Signature: (I[J)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dset_1extent
+  (JNIEnv *env, jclass clss, jint loc_id, jlongArray buf)
+{
+    herr_t    status;
+    hsize_t  *dims;
+    jlong    *buffP;
+    jsize     rank;
+    jboolean  isCopy;
+    int       i = 0;
+
+    if (buf == NULL) {
+        h5nullArgument(env, "H5Dset_extent:  buf is NULL");
+        return;
+    }
+
+    rank = ENVPTR->GetArrayLength(ENVPAR buf);
+    if (rank <= 0) {
+        h5JNIFatalError(env, "H5Dset_extent:  rank <=0");
+        return;
+    }
+
+    buffP = ENVPTR->GetLongArrayElements(ENVPAR buf, &isCopy);
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dset_extent:  buf not pinned");
+        return;
+    }
+
+    dims = (hsize_t*)malloc(rank * sizeof(hsize_t));
+    if (dims == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+        h5outOfMemory(env, "H5Dset_extent:  malloc failed");
+        return;
+    }
+    for (i = 0; i < rank; i++)
+        dims[i] = (hsize_t)buffP[i];
+
+    status = H5Dset_extent((hid_t)loc_id, (hsize_t*)dims);
+
+    free (dims);
+
+    ENVPTR->ReleaseLongArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+}
+
+herr_t H5D_iterate_cb(void* elem, hid_t elem_id, unsigned ndim, const hsize_t *point, void *op_data) {
+    JNIEnv    *cbenv;
+    jint       status;
+    jclass     cls;
+    jmethodID  mid;
+    jbyteArray elemArray;
+    jlongArray pointArray;
+    jsize      size;
+
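+    /* the callback may arrive on a thread without a JNIEnv; AttachCurrentThread
+       obtains one (a no-op if the current thread is already attached) */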
+    if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) != 0) {
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return -1;
+    }
+    cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+    if (cls == 0) {
+       JVMPTR->DetachCurrentThread(JVMPAR);
+       return -1;
+    }
+    mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "([BII[JLncsa/hdf/hdf5lib/callbacks/H5D_iterate_t;)I");
+    if (mid == 0) {
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return -1;
+    }
+    
+    if (elem == NULL) {
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return -1;
+    }
+    if (point == NULL) {
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return -1;
+    }
+
+    size = H5Tget_size(elem_id);
+    elemArray = CBENVPTR->NewByteArray(CBENVPAR size);
+    if (elemArray == NULL) {
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return -1;
+    }
+    CBENVPTR->SetByteArrayRegion(CBENVPAR elemArray, 0, size, (jbyte *)elem);
+    
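+    /* note: the coordinate array handed to Java is fixed at two entries here,
+       independent of ndim */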
+    pointArray = CBENVPTR->NewLongArray(CBENVPAR 2);
+    if (pointArray == NULL) {
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return -1;
+    }
+    CBENVPTR->SetLongArrayRegion(CBENVPAR pointArray, 0, 2, (const jlong *)point);
+
+    status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, elemArray, elem_id, ndim, pointArray, op_data);
+
+    CBENVPTR->GetByteArrayRegion(CBENVPAR elemArray, 0, size, (jbyte *)elem);
+
+    JVMPTR->DetachCurrentThread(JVMPAR);
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Diterate
+ * Signature: ([BIILjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Diterate
+  (JNIEnv *env, jclass clss, jbyteArray buf, jint buf_type, jint space,
+          jobject callback_op, jobject op_data)
+{
+    jboolean      isCopy;
+    jbyte        *buffP;
+    herr_t        status = -1;
+    
+    ENVPTR->GetJavaVM(ENVPAR &jvm);
+    visit_callback = callback_op;
+
+    if (op_data == NULL) {
+        h5nullArgument(env,  "H5Diterate:  op_data is NULL");
+        return -1;
+    }
+    if (callback_op == NULL) {
+        h5nullArgument(env,  "H5Diterate:  callback_op is NULL");
+        return -1;
+    }
+
+    if (buf == NULL) {
+        h5nullArgument(env,  "H5Diterate:  buf is NULL");
+        return -1;
+    }
+    buffP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+    if (buffP == NULL) {
+        h5JNIFatalError(env, "H5Diterate:  buf not pinned");
+        return -1;
+    }
+    
+    status = H5Diterate((void*)buffP, (hid_t)buf_type, (hid_t)space, (H5D_operator_t)H5D_iterate_cb, (void*)op_data);
+    
+    if (status < 0) {
+       ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+       h5libraryError(env);
+       return status;
+    }
+    
+    if (isCopy == JNI_TRUE) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, 0);
+    }
+    
+    return status;
+}
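+
+/*
+ * For reference only: a minimal, hedged sketch (hypothetical helpers, not part
+ * of the binding and not called anywhere) of the plain C H5Diterate usage that
+ * the JNI callback machinery above mirrors: the operator is handed each
+ * element of buf in turn, together with its coordinates, and may modify the
+ * element in place; returning 0 continues the iteration.
+ */
+#if 0
+static herr_t example_scale_op(void *elem, hid_t type_id, unsigned ndim,
+                               const hsize_t *point, void *op_data)
+{
+    (void)ndim;
+    (void)point;
+    if (H5Tequal(type_id, H5T_NATIVE_INT) > 0)
+        *(int*)elem *= *(int*)op_data;  /* scale each int element in place */
+    return 0;
+}
+
+static herr_t example_iterate(int *buf, hid_t space_id, int factor)
+{
+    return H5Diterate(buf, H5T_NATIVE_INT, space_id, example_scale_op, &factor);
+}
+#endif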
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5dImp.h b/source/c/hdf-java/h5dImp.h
new file mode 100755
index 0000000..c43a5b5
--- /dev/null
+++ b/source/c/hdf-java/h5dImp.h
@@ -0,0 +1,348 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5D */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5D
+#define _Included_ncsa_hdf_hdf5lib_H5_H5D
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+extern JavaVM *jvm;
+extern jobject visit_callback;   
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dcreate
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dcreate
+  (JNIEnv*, jclass, jint, jstring, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dchdir_ext
+ * Signature: (Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dchdir_1ext
+  (JNIEnv*, jclass, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dgetdir_ext
+ * Signature: ([Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dgetdir_1ext
+  (JNIEnv*, jclass, jobjectArray, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dopen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dopen
+  (JNIEnv*, jclass, jint, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_space
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1space
+  (JNIEnv*, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1type
+  (JNIEnv*, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1create_1plist
+  (JNIEnv*, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread
+ * Signature: (IIIII[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jbyteArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite
+ * Signature: (IIIII[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jbyteArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dextend
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dextend
+  (JNIEnv*, jclass, jint, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dclose
+  (JNIEnv*, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_storage_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1storage_1size
+  (JNIEnv*, jclass, jint);
+
+/*
+ * Copies the content of one dataset to another dataset
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dcopy
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dcopy
+  (JNIEnv*, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_get_buf_size
+ * Signature: (III[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size
+  (JNIEnv*, jclass, jint, jint, jint, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_reclaim
+ * Signature: (III[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dvlen_1reclaim
+  (JNIEnv*, jclass, jint, jint, jint, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dget_space_status
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dget_1space_1status
+  (JNIEnv*, jclass, jint, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_short
+ * Signature: (IIIII[S[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1short
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jshortArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_int
+ * Signature: (IIIII[I[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1int
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jintArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_long
+ * Signature: (IIIII[J[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1long
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jlongArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_float
+ * Signature: (IIIII[F[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1float
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jfloatArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_double
+ * Signature: (IIIII[D[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1double
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jdoubleArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_string
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1string
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jobjectArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_short
+ * Signature: (IIIII[S[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1short
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jshortArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_int
+ * Signature: (IIIII[I[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1int
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jintArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_long
+ * Signature: (IIIII[J[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1long
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jlongArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_float
+ * Signature: (IIIII[F[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1float
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jfloatArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite_double
+ * Signature: (IIIII[D[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dwrite_1double
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jdoubleArray, jboolean);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5DwriteString
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5DwriteString
+  (JNIEnv *, jclass, jint, jint, jint, jint, jint, jobjectArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5DreadVL
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5DreadVL
+  (JNIEnv*, jclass, jint, jint, jint, jint, jint, jobjectArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_reg_ref
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1reg_1ref (JNIEnv*, jclass,
+    jint, jint, jint, jint, jint, jobjectArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_reg_ref_data
+ * Signature: (IIIII[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dread_1reg_1ref_1data (JNIEnv*, jclass,
+    jint, jint, jint, jint, jint, jobjectArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dcreate2
+ * Signature: (ILjava/lang/String;IIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dcreate2
+  (JNIEnv *, jclass, jint, jstring, jint, jint, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dopen2
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dopen2
+  (JNIEnv *, jclass, jint, jstring, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Dcreate_anon
+ * Signature: (IIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Dcreate_1anon
+  (JNIEnv *, jclass, jint, jint, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_space_status
+ * Signature: (I)I;
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1space_1status
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_access_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1access_1plist
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_offset
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dget_1offset
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_get_buf_size_long
+ * Signature: (III)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size_1long
+  (JNIEnv *, jclass, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dfill
+ * Signature: ([BI[BII)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dfill
+  (JNIEnv *, jclass, jbyteArray, jint, jbyteArray, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dset_extent
+ * Signature: (I[J)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Dset_1extent
+  (JNIEnv *, jclass, jint, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Diterate
+ * Signature: ([BIILjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Diterate
+  (JNIEnv *, jclass, jbyteArray, jint, jint, jobject, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5eImp.c b/source/c/hdf-java/h5eImp.c
new file mode 100755
index 0000000..8ffa75a
--- /dev/null
+++ b/source/c/hdf-java/h5eImp.c
@@ -0,0 +1,479 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  error interface functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://www.hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5eImp.h"
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eauto_is_v2
+     * Signature: (I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eauto_1is_1v2
+      (JNIEnv *env, jclass cls, jint stk_id)
+    {
+        herr_t ret_val = -1;
+        unsigned int is_stack = 0;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Eauto_is_v2: invalid argument");
+            return 0;
+        }
+        ret_val = H5Eauto_is_v2(stk_id, &is_stack);
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return 0;
+        }
+        return is_stack;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eregister_class
+     * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eregister_1class
+      (JNIEnv *env, jclass cls, jstring cls_name, jstring lib_name, jstring version)
+    {
+        herr_t ret_val = -1;
+        char* the_cls_name;
+        char* the_lib_name;
+        char* the_version;
+        jboolean isCopy;
+
+        if (cls_name == NULL) {
+            h5nullArgument( env, "H5Eregister_class: error class name is NULL");
+            return ret_val;
+        }
+        if (lib_name == NULL) {
+            h5nullArgument( env, "H5Eregister_class: client library or application name is NULL");
+            return ret_val;
+        }
+        if (version == NULL) {
+            h5nullArgument( env, "H5Eregister_class: version of the client library or application is NULL");
+            return ret_val;
+        }
+        the_cls_name = (char *)ENVPTR->GetStringUTFChars(ENVPAR cls_name, &isCopy);
+        if (the_cls_name == NULL) {
+            h5JNIFatalError( env, "H5Eregister_class: error class name not pinned");
+            return ret_val;
+        }
+        the_lib_name = (char *)ENVPTR->GetStringUTFChars(ENVPAR lib_name, &isCopy);
+        if (the_lib_name == NULL) {
+            /* release what was already pinned before bailing out */
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cls_name, the_cls_name);
+            h5JNIFatalError( env, "H5Eregister_class: client name not pinned");
+            return ret_val;
+        }
+        the_version = (char *)ENVPTR->GetStringUTFChars(ENVPAR version, &isCopy);
+        if (the_version == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cls_name, the_cls_name);
+            ENVPTR->ReleaseStringUTFChars(ENVPAR lib_name, the_lib_name);
+            h5JNIFatalError( env, "H5Eregister_class: version not pinned");
+            return ret_val;
+        }
+        ret_val = H5Eregister_class(the_cls_name, the_lib_name, the_version);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR cls_name, the_cls_name);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR lib_name, the_lib_name);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR version, the_version);
+        if (ret_val < 0) {
+            h5libraryError(env);
+        }
+        return (jint)ret_val;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eunregister_class
+     * Signature: (I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eunregister_1class
+      (JNIEnv *env, jclass cls, jint cls_id)
+    {
+        herr_t ret_val = -1;
+
+        if (cls_id < 0) {
+            h5badArgument(env, "H5Eunregister_class: invalid argument");
+            return;
+        }
+        ret_val = H5Eunregister_class((hid_t)cls_id);
+        if (ret_val < 0) {
+            h5libraryError(env);
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eclose_msg
+     * Signature: (I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eclose_1msg
+      (JNIEnv *env, jclass cls, jint err_id)
+    {
+        herr_t ret_val = -1;
+
+        if (err_id < 0) {
+            h5badArgument(env, "H5Eclose_msg: invalid argument");
+            return;
+        }
+        ret_val = H5Eclose_msg((hid_t)err_id);
+        if (ret_val < 0) {
+            h5libraryError(env);
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ecreate_msg
+     * Signature: (IILjava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ecreate_1msg
+      (JNIEnv *env, jclass cls, jint err_id, jint msg_type, jstring err_msg)
+    {
+        herr_t ret_val = -1;
+        char* the_err_msg;
+        jboolean isCopy;
+        H5E_type_t error_msg_type = (H5E_type_t)msg_type;
+
+        if (err_id < 0) {
+            h5badArgument(env, "H5Ecreate_msg: invalid argument");
+            return ret_val;
+        }
+        if(err_msg==NULL) {
+            h5nullArgument( env, "H5Ecreate_msg: error message is NULL");
+            return ret_val;
+        }
+        the_err_msg = (char *)ENVPTR->GetStringUTFChars(ENVPAR err_msg,&isCopy);
+        if (the_err_msg == NULL) {
+            h5JNIFatalError( env, "H5Ecreate_msg: error message not pinned");
+            return ret_val;
+        }
+        ret_val = H5Ecreate_msg((hid_t)err_id, error_msg_type, the_err_msg);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR err_msg, the_err_msg);
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return ret_val;
+        }
+        return (jint)ret_val;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ecreate_stack
+     * Signature: ()I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ecreate_1stack
+      (JNIEnv *env, jclass cls)
+    {
+        jint ret_val = -1;
+        ret_val = H5Ecreate_stack();
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return -1;
+        }
+        return ret_val;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eget_current_stack
+     * Signature: ()I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1current_1stack
+      (JNIEnv *env, jclass cls)
+    {
+        hid_t ret_val = H5Eget_current_stack();
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return -1;
+        }
+        return ret_val;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eclose_stack
+     * Signature: (I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eclose_1stack
+      (JNIEnv *env, jclass cls, jint stk_id)
+    {
+        herr_t ret_val = -1;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Eclose_stack: invalid argument");
+            return;
+        }
+        ret_val = H5Eclose_stack((hid_t)stk_id);
+        if (ret_val < 0) {
+            h5libraryError(env);
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eprint1
+     * Signature: (Ljava/lang/Object;)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eprint1
+      (JNIEnv *env, jclass cls, jobject stream_obj)
+    {
+        herr_t ret_val = -1;
+
+        if(!stream_obj)
+            ret_val = H5Eprint1(stdout);
+        else
+            ret_val = H5Eprint1((FILE*)stream_obj);
+        if (ret_val < 0) {
+            h5libraryError(env);
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eprint2
+     * Signature: (ILjava/lang/Object;)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eprint2
+      (JNIEnv *env, jclass cls, jint stk_id, jobject stream_obj)
+    {
+        herr_t ret_val = -1;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Eprint2: invalid argument");
+            return;
+        }
+        if(!stream_obj)
+            ret_val = H5Eprint2((hid_t)stk_id, stdout);
+        else
+            ret_val = H5Eprint2((hid_t)stk_id, (FILE*)stream_obj);
+        if (ret_val < 0) {
+            h5libraryError(env);
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eget_class_name
+     * Signature: (I)Ljava/lang/String;
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1class_1name
+      (JNIEnv *env, jclass cls, jint cls_id)
+    {
+        char *namePtr;
+        jstring str;
+        ssize_t buf_size;
+
+        if (cls_id < 0) {
+            h5badArgument(env, "H5Eget_class_name: invalid argument");
+            return NULL;
+        }
+        /* get the length of the name */
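+        /* (a NULL buffer asks H5Eget_class_name only for the required length) */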
+        buf_size = H5Eget_class_name(cls_id, NULL, 0);
+
+        if (buf_size < 0) {
+            h5badArgument( env, "H5Eget_class_name:  buf_size < 0");
+            return NULL;
+        }
+        if (buf_size == 0) {
+            h5badArgument( env, "H5Eget_class_name:  No class name");
+            return NULL;
+        }
+
+        buf_size++; /* add extra space for the null terminator */
+        namePtr = (char*)malloc(sizeof(char)*buf_size);
+        if (namePtr == NULL) {
+            h5outOfMemory( env, "H5Eget_class_name:  malloc failed");
+            return NULL;
+        }
+        buf_size = H5Eget_class_name((hid_t)cls_id, (char *)namePtr, (size_t)buf_size);
+
+        if (buf_size < 0) {
+            free(namePtr);
+            h5libraryError(env);
+            return NULL;
+        }
+
+        str = ENVPTR->NewStringUTF(ENVPAR namePtr);
+        free(namePtr);
+
+        return str;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eset_current_stack
+     * Signature: (I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eset_1current_1stack
+      (JNIEnv *env, jclass cls, jint stk_id)
+    {
+        herr_t ret_val = -1;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Eset_current_stack: invalid argument");
+            return;
+        }
+        ret_val = H5Eset_current_stack(stk_id);
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return;
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Epop
+     * Signature: (IJ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Epop
+      (JNIEnv *env, jclass cls, jint stk_id, jlong count)
+    {
+        herr_t ret_val = -1;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Epop: invalid argument");
+            return;
+        }
+        ret_val = H5Epop(stk_id, (size_t)count);
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return;
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eclear2
+     * Signature: (I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eclear2
+      (JNIEnv *env, jclass cls, jint stk_id)
+    {
+        herr_t ret_val = -1;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Eclear2: invalid argument");
+            return;
+        }
+        ret_val = H5Eclear2(stk_id);
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return;
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eget_msg
+     * Signature: (I[I)Ljava/lang/String;
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1msg
+      (JNIEnv *env, jclass cls, jint msg_id, jintArray error_msg_type_list)
+    {
+        char *namePtr;
+        jstring str;
+        jboolean isCopy;
+        ssize_t buf_size;
+        jint *theArray;
+        H5E_type_t error_msg_type;
+
+        if (msg_id < 0) {
+            h5badArgument(env, "H5Eget_msg: invalid argument");
+            return NULL;
+        }
+        /* get the length of the name */
+        buf_size = H5Eget_msg(msg_id, NULL, NULL, 0);
+
+        if (buf_size < 0) {
+            h5badArgument( env, "H5Eget_msg:  buf_size < 0");
+            return NULL;
+        }
+        if (buf_size == 0) {
+            h5badArgument( env, "H5Eget_msg:  No message");
+            return NULL;
+        }
+
+        buf_size++; /* add extra space for the null terminator */
+        namePtr = (char*)malloc(sizeof(char)*buf_size);
+        if (namePtr == NULL) {
+            h5outOfMemory( env, "H5Eget_msg:  malloc failed");
+            return NULL;
+        }
+        if (error_msg_type_list == NULL) {
+            free(namePtr);
+            h5nullArgument( env, "H5Eget_msg:  error_msg_type_list is NULL");
+            return NULL;
+        }
+        theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR error_msg_type_list, &isCopy);
+        if (theArray == NULL) {
+            free(namePtr);
+            h5JNIFatalError( env, "H5Eget_msg:  error_msg_type_list not pinned");
+            return NULL;
+        }
+
+        buf_size = H5Eget_msg((hid_t)msg_id, &error_msg_type, (char *)namePtr, (size_t)buf_size);
+
+        if (buf_size < 0) {
+            free(namePtr);
+            ENVPTR->ReleaseIntArrayElements(ENVPAR error_msg_type_list,theArray,JNI_ABORT);
+            h5libraryError(env);
+            return NULL;
+        }
+        theArray[0] = error_msg_type;
+        ENVPTR->ReleaseIntArrayElements(ENVPAR error_msg_type_list,theArray,0);
+
+        str = ENVPTR->NewStringUTF(ENVPAR namePtr);
+        free(namePtr);
+
+        return str;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Eget_num
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1num
+      (JNIEnv *env, jclass cls, jint stk_id)
+    {
+        ssize_t ret_val = -1;
+
+        if (stk_id < 0) {
+            h5badArgument(env, "H5Eget_num: invalid argument");
+            return -1;
+        }
+        ret_val = H5Eget_num(stk_id);
+        if (ret_val < 0) {
+            h5libraryError(env);
+            return -1;
+        }
+        return ret_val;
+    }
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5eImp.h b/source/c/hdf-java/h5eImp.h
new file mode 100755
index 0000000..e55163a
--- /dev/null
+++ b/source/c/hdf-java/h5eImp.h
@@ -0,0 +1,142 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5E */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5E
+#define _Included_ncsa_hdf_hdf5lib_H5_H5E
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eauto_is_v2
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eauto_1is_1v2
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eregister_class
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eregister_1class
+  (JNIEnv *, jclass, jstring, jstring, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eunregister_class
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eunregister_1class
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eclose_msg
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eclose_1msg
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Ecreate_msg
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ecreate_1msg
+  (JNIEnv *, jclass, jint, jint, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Ecreate_stack
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ecreate_1stack
+  (JNIEnv *, jclass);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eget_current_stack
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1current_1stack
+  (JNIEnv *, jclass);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eclose_stack
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eclose_1stack
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eprint1
+ * Signature: (Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eprint1
+  (JNIEnv *, jclass, jobject);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eprint2
+ * Signature: (ILjava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eprint2
+  (JNIEnv *, jclass, jint, jobject);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eget_class_name
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1class_1name
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eset_current_stack
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eset_1current_1stack
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Epop
+ * Signature: (IJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Epop
+  (JNIEnv *, jclass, jint, jlong);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eclear2
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eclear2
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eget_msg
+ * Signature: (I[I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1msg
+  (JNIEnv *, jclass, jint, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eget_num
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Eget_1num
+  (JNIEnv *, jclass, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5fImp.c b/source/c/hdf-java/h5fImp.c
new file mode 100755
index 0000000..1eb8e6b
--- /dev/null
+++ b/source/c/hdf-java/h5fImp.c
@@ -0,0 +1,736 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  file interface functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5fImp.h"
+#include "h5util.h"
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fopen
+ * Signature: (Ljava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Fopen
+  (JNIEnv *env, jclass clss, jstring name, jint flags, jint access_id)
+{
+    hid_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fopen:  name is NULL");
+        return -1;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fopen:  file name not pinned");
+        return -1;
+    }
+    status = H5Fopen(file, (unsigned) flags, (hid_t) access_id );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,file);
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fcreate
+ * Signature: (Ljava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Fcreate
+  (JNIEnv *env, jclass clss, jstring name, jint flags, jint create_id, jint access_id)
+{
+    hid_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fcreate:  name is NULL");
+        return -1;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fcreate:  file name is not pinned");
+        return -1;
+    }
+
+    status = H5Fcreate(file, flags, create_id, access_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,file);
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fflush
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fflush
+  (JNIEnv *env, jclass clss, jint object_id, jint scope)
+{
+    herr_t retVal = -1;
+    retVal =  H5Fflush((hid_t) object_id, (H5F_scope_t) scope );
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_name
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1name
+  (JNIEnv *env, jclass cls, jint file_id)
+{
+    char *namePtr;
+    jstring str;
+    ssize_t buf_size;
+
+    /* get the length of the name */
+    buf_size = H5Fget_name(file_id, NULL, 0);
+
+    if (buf_size <= 0) {
+        h5badArgument( env, "H5Fget_name:  buf_size <= 0");
+        return NULL;
+    }
+
+    buf_size++; /* add extra space for the null terminator */
+    namePtr = (char*)malloc(sizeof(char)*buf_size);
+    if (namePtr == NULL) {
+        h5outOfMemory( env, "H5Fget_name:  malloc failed");
+        return NULL;
+    }
+
+    buf_size = H5Fget_name ((hid_t) file_id, (char *)namePtr, (size_t)buf_size);
+
+    if (buf_size < 0) {
+        free(namePtr);
+        h5libraryError(env);
+        return NULL;
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR namePtr);
+    free(namePtr);
+
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fis_hdf5
+ * Signature: (Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fis_1hdf5
+  (JNIEnv *env, jclass clss, jstring name)
+{
+    htri_t retVal = 0;
+    char * file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fis_hdf5:  name is NULL");
+        return JNI_FALSE;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fis_hdf5:  file name is not pinned");
+        return JNI_FALSE;
+    }
+
+    retVal = H5Fis_hdf5(file);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,file);
+
+    if (retVal > 0) {
+        return JNI_TRUE;
+    } 
+    else if (retVal == 0) {
+        return JNI_FALSE;
+    } 
+    else {
+        /*  raise exception here -- return value is irrelevant */
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Fget_1create_1plist
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Fget_create_plist((hid_t) file_id );
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_access_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Fget_1access_1plist
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Fget_access_plist((hid_t) file_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_intent
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1intent
+  (JNIEnv *env, jclass cls, jint file_id)
+{
+    herr_t ret_val = -1;
+    unsigned intent = 0;
+
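+    /* on success H5Fget_intent sets intent to H5F_ACC_RDWR or H5F_ACC_RDONLY */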
+    ret_val =  H5Fget_intent((hid_t) file_id, &intent);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)intent;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Fclose
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    herr_t status = -1;
+
+    if (file_id > 0)
+        status = H5Fclose((hid_t) file_id );
+
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fmount
+ * Signature: (ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fmount
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint child_id, jint plist_id)
+{
+    herr_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fmount:  name is NULL");
+        return -1;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fmount:  file name is not pinned");
+        return -1;
+    }
+
+    status = H5Fmount((hid_t) loc_id, file, (hid_t) child_id, (hid_t) plist_id );
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,file);
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Funmount
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Funmount
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Funmount:  name is NULL");
+        return -1;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Funmount:  file name is not pinned");
+        /* exception -- out of memory? */
+        return -1;
+    }
+
+    status = H5Funmount((hid_t) loc_id, file );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,file);
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
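+/*
+ * Illustrative pairing of the two wrappers above (a sketch, not library
+ * code): a child file mounted on a group is later unmounted at the same
+ * location, e.g.
+ *
+ *     H5Fmount(loc_id, "/mnt", child_id, H5P_DEFAULT);
+ *     ... access objects under /mnt ...
+ *     H5Funmount(loc_id, "/mnt");
+ */
+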
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_freespace
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1freespace
+  (JNIEnv *env, jclass cls, jint file_id)
+{
+    hssize_t ret_val = -1;
+
+    ret_val = H5Fget_freespace((hid_t)file_id);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong)ret_val;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Freopen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Freopen
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Freopen((hid_t)file_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_ids_long
+ * Signature: (IIJ[I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1obj_1ids_1long
+  (JNIEnv *env, jclass cls, jint file_id, jint types, jlong maxObjs,
+          jintArray obj_id_list)
+{
+    ssize_t ret_val;
+    jint *obj_id_listP;
+    jboolean isCopy;
+    hid_t *id_list;
+    int rank;
+    int i;
+
+    ret_val = -1;
+
+    if ( obj_id_list == NULL ) {
+        h5nullArgument( env, "H5Fget_obj_ids_long:  obj_id_list is NULL");
+        return -1;
+    }
+
+    obj_id_listP = ENVPTR->GetIntArrayElements(ENVPAR obj_id_list,&isCopy);
+    if (obj_id_listP == NULL) {
+        h5JNIFatalError( env, "H5Fget_obj_ids_long:  obj_id_list not pinned");
+        return -1;
+    }
+    rank = (int)ENVPTR->GetArrayLength(ENVPAR obj_id_list);
+
+    id_list = (hid_t *)malloc( rank * sizeof(hid_t));
+
+    if (id_list == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR obj_id_list,obj_id_listP,JNI_ABORT);
+        h5JNIFatalError(env,  "H5Fget_obj_ids_long:  obj_id_list not converted to hid_t");
+        return -1;
+    }
+
+    ret_val = H5Fget_obj_ids((hid_t)file_id, (unsigned int)types, (size_t)maxObjs, id_list);
+
+    if (ret_val < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR obj_id_list,obj_id_listP,JNI_ABORT);
+        free(id_list);
+        h5libraryError(env);
+        return -1;
+    }
+
+    for (i = 0; i < rank; i++) {
+        obj_id_listP[i] = id_list[i];
+    }
+    free(id_list);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR obj_id_list,obj_id_listP,0);
+
+    return (jlong)ret_val;
+}
+
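+/*
+ * Note on the variant above: the ids are fetched into a temporary hid_t
+ * buffer and copied element-wise into the jint array, which stays correct
+ * even where sizeof(hid_t) != sizeof(jint); the caller's obj_id_list is
+ * assumed to hold at least maxObjs elements, since H5Fget_obj_ids may fill
+ * up to that many entries.
+ */
+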
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_ids
+ * Signature: (III[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1obj_1ids
+  (JNIEnv *env, jclass clss, jint file_id, jint types, jint obj_count, jintArray obj_id_list)
+{
+    ssize_t status=-1;
+    jint *obj_id_listP;
+    jboolean isCopy;
+
+    if ( obj_id_list == NULL ) {
+        h5nullArgument( env, "H5Fget_obj_ids:  obj_id_list is NULL");
+        return -1;
+    }
+
+    obj_id_listP = ENVPTR->GetIntArrayElements(ENVPAR obj_id_list,&isCopy);
+    if (obj_id_listP == NULL) {
+        h5JNIFatalError( env, "H5Fget_obj_ids:  obj_id_list not pinned");
+        return -1;
+    }
+
+    status = H5Fget_obj_ids((hid_t)file_id, (unsigned int)types, (int)obj_count, (hid_t*)obj_id_listP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR obj_id_list,obj_id_listP,JNI_ABORT);
+        h5libraryError(env);
+    } 
+    else  {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR obj_id_list,obj_id_listP,0);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_count(hid_t file_id, unsigned int types )
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1obj_1count
+  (JNIEnv *env, jclass clss, jint file_id, jint types )
+{
+    ssize_t status = -1;
+
+    status = H5Fget_obj_count((hid_t)file_id, (unsigned int)types );
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_count_long
+ * Signature: (II)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1obj_1count_1long
+  (JNIEnv *env, jclass cls, jint file_id, jint types)
+{
+    ssize_t ret_val = -1;
+
+    ret_val = H5Fget_obj_count((hid_t)file_id, (unsigned int)types );
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong)ret_val;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_name
+ * Signature: (ILjava/lang/String;I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_2name
+  (JNIEnv *env, jclass clss, jint obj_id, jstring name, jint buf_size)
+{
+    char *aName;
+    jstring str;
+    ssize_t size;
+
+    if (buf_size <= 0) {
+        h5badArgument( env, "H5Fget_name:  buf_size <= 0");
+        return NULL;
+    }
+    aName = (char*)malloc(sizeof(char)*buf_size);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Fget_name:  malloc failed");
+        return NULL;
+    }
+    size = H5Fget_name ((hid_t) obj_id, (char *)aName, (size_t)buf_size);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return NULL; 
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+    free(aName);
+
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_filesize
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1filesize
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    herr_t status;
+    hsize_t size = 0;
+
+    status = H5Fget_filesize ((hid_t) file_id, (hsize_t *) &size);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong) size;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_mdc_hit_rate
+ * Signature: (I)D
+ */
+JNIEXPORT jdouble JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1mdc_1hit_1rate
+  (JNIEnv *env, jclass cls, jint file_id)
+{
+    double rate = 0.0;
+    herr_t ret_val = -1;
+
+    ret_val = H5Fget_mdc_hit_rate((hid_t)file_id, &rate);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+
+    return (jdouble)rate;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_mdc_size
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1mdc_1size
+  (JNIEnv *env, jclass cls, jint file_id, jlongArray metadata_cache)
+{
+    herr_t ret_val = -1;
+    jint size = 0;
+    jlong *metadata_cache_ptr;
+    size_t max_size=0, min_clean_size=0, cur_size=0;
+    int cur_num_entries=0;
+    jboolean isCopy;
+
+    if ( metadata_cache == NULL ) {
+        h5nullArgument( env, "H5Fget_mdc_size:  metadata_cache is NULL");
+        return -1;
+    }
+
+    size = (int)ENVPTR->GetArrayLength(ENVPAR metadata_cache);
+    if (size < 3) {
+        h5badArgument(env, "H5Fget_mdc_size:  length of metadata_cache < 3.");
+        return -1;
+    }
+
+    ret_val = H5Fget_mdc_size((hid_t)file_id, &max_size, &min_clean_size,
+            &cur_size, &cur_num_entries);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
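+    /* pack the three size_t statistics into the caller's jlong[3]:
+     * [0]=max_size, [1]=min_clean_size, [2]=cur_size; the current number
+     * of cache entries is the method's return value */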
+    metadata_cache_ptr = ENVPTR->GetLongArrayElements(ENVPAR metadata_cache,&isCopy);
+    if (metadata_cache_ptr == NULL) {
+        h5JNIFatalError( env, "H5Fget_mdc_size:  metadata_cache not pinned");
+        return -1;
+    }
+    metadata_cache_ptr[0] = max_size;
+    metadata_cache_ptr[1] = min_clean_size;
+    metadata_cache_ptr[2] = cur_size;
+    ENVPTR->ReleaseLongArrayElements(ENVPAR metadata_cache, metadata_cache_ptr, 0);
+
+    return (jint)cur_num_entries;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Freset_mdc_hit_rate_stats
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Freset_1mdc_1hit_1rate_1stats
+  (JNIEnv *env, jclass cls, jint file_id)
+{
+    herr_t ret_val = -1;
+
+    ret_val = H5Freset_mdc_hit_rate_stats((hid_t)file_id);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5export_dataset
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5export_1dataset
+  (JNIEnv *env, jclass cls, jstring file_export_name, jstring file_name, jstring object_path, jint binary_order)
+{
+    herr_t status = -1;
+    herr_t ret_val = -1;
+    hid_t file_id = -1;
+    hid_t dataset_id = -1;
+    FILE *stream;
+    char *file_export;
+    char *file;
+    char *object_name;
+    jboolean isCopy;
+
+    if (file_export_name == NULL) {
+        h5nullArgument(env, "HDF5Library_export_data:  file_export_name is NULL");
+        return;
+    }
+    if (file_name == NULL) {
+        h5nullArgument(env, "HDF5Library_export_data:  file_name is NULL");
+        return;
+    }
+    if (object_path == NULL) {
+        h5nullArgument(env, "HDF5Library_export_data:  object_path is NULL");
+        return;
+    }
+
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR file_name, &isCopy);
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fopen:  file name not pinned");
+        return;
+    }
+
+    file_id = H5Fopen(file, (unsigned)H5F_ACC_RDWR, (hid_t)H5P_DEFAULT);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, file);
+    if (file_id < 0) {
+        /* throw exception */
+        h5libraryError(env);
+        return;
+    }
+
+    object_name = (char*)ENVPTR->GetStringUTFChars(ENVPAR object_path, &isCopy);
+    if (object_name == NULL) {
+        H5Fclose(file_id);
+        h5JNIFatalError( env, "H5Dopen:  object name not pinned");
+        return;
+    }
+
+    dataset_id = H5Dopen2(file_id, (const char*)object_name, (hid_t)H5P_DEFAULT);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR object_path, object_name);
+    if (dataset_id < 0) {
+        H5Fclose(file_id);
+        h5libraryError(env);
+        return;
+    }
+
+    file_export = (char *)ENVPTR->GetStringUTFChars(ENVPAR file_export_name, 0);
+    stream = fopen(file_export, "w+");
+    ENVPTR->ReleaseStringUTFChars(ENVPAR file_export_name, file_export);
+
+    /* ret_val stays -1 (and an exception is raised below) if fopen failed */
+    if (stream) {
+        ret_val = h5str_dump_simple_dset(stream, dataset_id, binary_order);
+        fclose(stream);
+    }
+
+    H5Dclose(dataset_id);
+
+    H5Fclose(file_id);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+}
+
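+/*
+ * Flow of the export helper above: H5Fopen the container file, H5Dopen2 the
+ * dataset, fopen the destination stream, dump it with h5str_dump_simple_dset,
+ * then close everything; a failure at any step raises a Java exception.
+ */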
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5fImp.h b/source/c/hdf-java/h5fImp.h
new file mode 100755
index 0000000..1cf2a5e
--- /dev/null
+++ b/source/c/hdf-java/h5fImp.h
@@ -0,0 +1,197 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5F */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5F
+#define _Included_ncsa_hdf_hdf5lib_H5_H5F
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fis_hdf5
+ * Signature: (Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fis_1hdf5
+  (JNIEnv *, jclass, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fcreate
+ * Signature: (Ljava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fcreate
+  (JNIEnv *, jclass, jstring, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fopen
+ * Signature: (Ljava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fopen
+  (JNIEnv *, jclass, jstring, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Freopen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Freopen
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fflush
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fflush
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fclose
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1create_1plist
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_access_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1access_1plist
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_intent
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1intent
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_count
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1obj_1count
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_count_long
+ * Signature: (II)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1obj_1count_1long
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fmount
+ * Signature: (ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fmount
+  (JNIEnv *, jclass, jint, jstring, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Funmount
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Funmount
+  (JNIEnv *, jclass, jint, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_freespace
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1freespace
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_filesize
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1filesize
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_mdc_hit_rate
+ * Signature: (I)D
+ */
+JNIEXPORT jdouble JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1mdc_1hit_1rate
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_mdc_size
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1mdc_1size
+  (JNIEnv *, jclass, jint, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Freset_mdc_hit_rate_stats
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Freset_1mdc_1hit_1rate_1stats
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_name
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_1name
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_name
+ * Signature: (ILjava/lang/String;I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Fget_2name
+  (JNIEnv *, jclass, jint, jstring, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5export_dataset
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5export_1dataset
+  (JNIEnv *env, jclass cls, jstring file_export_name, jstring file_name, jstring object_path, jint binary_order);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5gImp.c b/source/c/hdf-java/h5gImp.c
new file mode 100755
index 0000000..1ad07ba
--- /dev/null
+++ b/source/c/hdf-java/h5gImp.c
@@ -0,0 +1,1336 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help@hdfgroup.org.           *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Group Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
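+/*
+ * Illustrative sketch (not an actual entry point) of the wrapper shape used
+ * throughout this file -- pin the Java string, call the HDF5 routine, unpin,
+ * and map a negative status to a raised Java exception:
+ *
+ *     gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+ *     status = H5Gsomething((hid_t)loc_id, gName, ...);   // hypothetical call
+ *     ENVPTR->ReleaseStringUTFChars(ENVPAR name, gName);
+ *     if (status < 0)
+ *         h5libraryError(env);
+ *     return (jint)status;
+ */
+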
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5gImp.h"
+#include "h5util.h"
+
+/* missing definitions from hdf5.h */
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE (!FALSE)
+#endif
+
+#ifdef __cplusplus
+    herr_t obj_info_all(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+    herr_t obj_info_max(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+    int H5Gget_obj_info_max(hid_t, char **, int *, int *, unsigned long *, int);
+    int H5Gget_obj_info_full( hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *fno, unsigned long *objno, int indexType, int indexOrder);
+#else
+    static herr_t obj_info_all(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+    static herr_t obj_info_max(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+    static int H5Gget_obj_info_max(hid_t, char **, int *, int *, unsigned long *, int);
+    static int H5Gget_obj_info_full( hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *fno, unsigned long *objno, int indexType, int indexOrder);
+#endif
+
+typedef struct info_all
+{
+    char **objname;
+    int *otype;
+    int *ltype;
+    unsigned long *objno;
+    unsigned long *fno;
+    unsigned long idxnum;
+    int count;
+} info_all_t;
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gcreate
+ * Signature: (ILjava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gcreate
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jlong size_hint)
+{
+    hid_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gcreate:  name is NULL");
+        return -1;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gcreate:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Gcreate2((hid_t)loc_id, gName, (hid_t)H5P_DEFAULT, (hid_t)H5P_DEFAULT, (hid_t)H5P_DEFAULT );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gopen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gopen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    hid_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gopen:  name is NULL");
+        return -1;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gopen:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Gopen2((hid_t)loc_id, gName, (hid_t)H5P_DEFAULT );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gclose
+  (JNIEnv *env, jclass clss, jint group_id)
+{
+    herr_t retVal =  H5Gclose((hid_t)group_id) ;
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Glink
+ * Signature: (IILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Glink
+  (JNIEnv *env, jclass clss, jint loc_id, jint link_type, jstring
+    current_name, jstring new_name)
+{
+    herr_t status;
+    char *cName, *nName;
+    jboolean isCopy;
+
+    if (current_name == NULL) {
+        h5nullArgument( env, "H5Glink:  current_name is NULL");
+        return -1;
+    }
+    if (new_name == NULL) {
+        h5nullArgument( env, "H5Glink:  new_name is NULL");
+        return -1;
+    }
+    cName = (char *)ENVPTR->GetStringUTFChars(ENVPAR current_name,&isCopy);
+    if (cName == NULL) {
+        h5JNIFatalError( env, "H5Glink:  current_name not pinned");
+        return -1;
+    }
+    nName = (char *)ENVPTR->GetStringUTFChars(ENVPAR new_name,&isCopy);
+    if (nName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR current_name,cName);
+        h5JNIFatalError( env, "H5Glink:  new_name not pinned");
+        return -1;
+    }
+
+    status = H5Glink((hid_t)loc_id, (H5G_link_t)link_type, cName, nName);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR new_name,nName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR current_name,cName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Glink
+ * Signature: (ILjava/lang/String;IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Glink2
+  (JNIEnv *env, jclass clss,
+    jint current_loc_id, jstring current_name, jint link_type,
+    jint new_loc_id, jstring new_name)
+{
+    herr_t status;
+    char *cName, *nName;
+    jboolean isCopy;
+
+    if (current_name == NULL) {
+        h5nullArgument( env, "H5Glink2:  current_name is NULL");
+        return -1;
+    }
+    if (new_name == NULL) {
+        h5nullArgument( env, "H5Glink2:  new_name is NULL");
+        return -1;
+    }
+    cName = (char *)ENVPTR->GetStringUTFChars(ENVPAR current_name,&isCopy);
+    if (cName == NULL) {
+        h5JNIFatalError( env, "H5Glink2:  current_name not pinned");
+        return -1;
+    }
+    nName = (char *)ENVPTR->GetStringUTFChars(ENVPAR new_name,&isCopy);
+    if (nName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR current_name,cName);
+        h5JNIFatalError( env, "H5Glink2:  new_name not pinned");
+        return -1;
+    }
+
+    status = H5Glink2((hid_t)current_loc_id, cName, (H5G_link_t)link_type, (hid_t)new_loc_id, nName);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR new_name,nName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR current_name,cName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gunlink
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gunlink
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gunlink:  name is NULL");
+        return -1;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gunlink:  name not pinned");
+        return -1;
+    }
+
+    status = H5Gunlink((hid_t)loc_id, gName );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gmove
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gmove
+  (JNIEnv *env, jclass clss, jint loc_id, jstring src, jstring dst)
+{
+    herr_t status;
+    char *sName, *dName;
+    jboolean isCopy;
+
+    if (src == NULL) {
+        h5nullArgument( env, "H5Gmove:  src is NULL");
+        return -1;
+    }
+    if (dst == NULL) {
+        h5nullArgument( env, "H5Gmove:  dst is NULL");
+        return -1;
+    }
+    sName = (char *)ENVPTR->GetStringUTFChars(ENVPAR src,&isCopy);
+    if (sName == NULL) {
+        h5JNIFatalError( env, "H5Gmove:  src not pinned");
+        return -1;
+    }
+    dName = (char *)ENVPTR->GetStringUTFChars(ENVPAR dst,&isCopy);
+    if (dName == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR src,sName);
+        h5JNIFatalError( env, "H5Gmove:  dst not pinned");
+        return -1;
+    }
+
+    status = H5Gmove((hid_t)loc_id, sName, dName );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR dst,dName);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR src,sName);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objinfo
+ * Signature: (ILjava/lang/String;Z[J[J[I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1objinfo
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jboolean follow_link,
+  jlongArray fileno, jlongArray objno, jintArray link_info, jlongArray mtime)
+{
+    char* gName;
+    jboolean isCopy;
+    herr_t retVal;
+    jint *linkInfo;
+    jlong *fileInfo, *objInfo, *timeInfo;
+    hbool_t follow;
+    H5G_stat_t h5gInfo;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  name is NULL");
+        return -1;
+    }
+    if (follow_link == JNI_TRUE) {
+        follow = TRUE;  /*  HDF5 'TRUE' */
+    }
+    else if (follow_link == JNI_FALSE) {
+        follow = FALSE;  /*  HDF5 'FALSE' */
+    }
+    else {
+        h5badArgument( env, "H5Gget_objinfo:  follow_link is invalid");
+        return -1;
+    }
+    if (fileno == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  fileno is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR fileno) < 2) {
+        h5badArgument( env, "H5Gget_objinfo:  fileno input array < 2");
+        return -1;
+    }
+    if (objno == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  objno is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR objno) < 2) {
+        h5badArgument( env, "H5Gget_objinfo:  objno input array < 2");
+        return -1;
+    }
+    if (link_info == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  link_info is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR link_info) < 3) {
+        h5badArgument( env, "H5Gget_objinfo:  link_info input array < 3");
+        return -1;
+    }
+    if (mtime == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  mtime is NULL");
+        return -1;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gget_object:  name not pinned");
+        return -1;
+    }
+    fileInfo = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR fileno,&isCopy);
+    if (fileInfo == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+        h5JNIFatalError( env, "H5Gget_object:  fileno not pinned");
+        return -1;
+    }
+    objInfo = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR objno,&isCopy);
+    if (objInfo == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR fileno,fileInfo,JNI_ABORT);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+        h5JNIFatalError( env, "H5Gget_object:  objno not pinned");
+        return -1;
+    }
+    linkInfo = (jint *)ENVPTR->GetIntArrayElements(ENVPAR link_info,&isCopy);
+    if (linkInfo == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR objno,objInfo,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR fileno,fileInfo,JNI_ABORT);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+        h5JNIFatalError( env, "H5Gget_object:  link_info not pinned");
+        return -1;
+    }
+    timeInfo = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR mtime,&isCopy);
+    if (timeInfo == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR link_info,linkInfo,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR objno,objInfo,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR fileno,fileInfo,JNI_ABORT);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+        h5JNIFatalError( env, "H5Gget_object:  mtime not pinned");
+        return -1;
+    }
+
+    retVal = H5Gget_objinfo((hid_t)loc_id, gName, follow, &h5gInfo);
+
+    if (retVal < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR mtime,timeInfo,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR objno,objInfo,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR fileno,fileInfo,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR link_info,linkInfo,JNI_ABORT);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+        h5libraryError(env);
+        return -1;
+    }
+    else {
+        fileInfo[0] = (jlong)h5gInfo.fileno[0];
+        fileInfo[1] = (jlong)h5gInfo.fileno[1];
+        objInfo[0] = (jlong)h5gInfo.objno[0];
+        objInfo[1] = (jlong)h5gInfo.objno[1];
+        timeInfo[0] = (jlong)h5gInfo.mtime;
+        linkInfo[0] = (jint)h5gInfo.nlink;
+        linkInfo[1] = (jint)h5gInfo.type;
+        linkInfo[2] = (jint)h5gInfo.linklen;
+        ENVPTR->ReleaseLongArrayElements(ENVPAR mtime,timeInfo,0);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR objno,objInfo,0);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR fileno,fileInfo,0);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR link_info,linkInfo,0);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_linkval
+ * Signature: (ILjava/lang/String;I[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1linkval
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint size,
+          jobjectArray value)
+{
+    char* gName;
+    jboolean isCopy;
+    char *lValue;
+    jstring str;
+    herr_t status;
+
+    if (size < 0) {
+        h5badArgument( env, "H5Gget_linkval:  size < 0");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_linkval:  name is NULL");
+        return -1;
+    }
+    lValue = (char *) malloc(sizeof(char)*size);
+    if (lValue == NULL) {
+        h5outOfMemory( env, "H5Gget_linkval:  malloc failed ");
+        return -1;
+    }
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (gName == NULL) {
+        free(lValue);
+        h5JNIFatalError( env, "H5Gget_linkval:  name not pinned");
+        return -1;
+    }
+
+    status = H5Gget_linkval((hid_t)loc_id, gName, (size_t)size, lValue);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    if (status >= 0)
+    {
+        /* may throw OutOfMemoryError */
+        str = ENVPTR->NewStringUTF(ENVPAR lValue);
+        if (str == NULL) {
+            /* exception -- fatal JNI error */
+            free(lValue);
+            h5JNIFatalError( env, "H5Gget_linkval:  return string not created");
+            return -1;
+        }
+        /*  the SetObjectArrayElement may raise exceptions... */
+        ENVPTR->SetObjectArrayElement(ENVPAR value,0,(jobject)str);
+        free(lValue);
+    }
+    else {
+        free(lValue);
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gset_comment
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gset_1comment
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jstring comment)
+{
+    herr_t status;
+    char *gName, *gComment;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gset_comment:  name is NULL");
+        return -1;
+    }
+    if (comment == NULL) {
+        h5nullArgument( env, "H5Gset_comment:  comment is NULL");
+        return -1;
+    }
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gset_comment:  name not pinned");
+        return -1;
+    }
+    gComment = (char *)ENVPTR->GetStringUTFChars(ENVPAR comment,&isCopy);
+    if (gComment == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+        h5JNIFatalError( env, "H5Gset_comment:  comment not pinned");
+        return -1;
+    }
+
+    status = H5Gset_comment((hid_t)loc_id, gName, gComment);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR comment,gComment);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_comment
+ * Signature: (ILjava/lang/String;I[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1comment
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint bufsize,
+  jobjectArray comment)
+{
+    char* gName;
+    jboolean isCopy;
+    char *gComment;
+    jstring str;
+    jint status;
+
+    if (bufsize <= 0) {
+        h5badArgument( env, "H5Gget_comment:  bufsize <= 0");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_comment:  name is NULL");
+        return -1;
+    }
+    if (comment == NULL) {
+        h5nullArgument( env, "H5Gget_comment:  comment is NULL");
+        return -1;
+    }
+    gComment = (char *)malloc(sizeof(char)*bufsize);
+    if (gComment == NULL) {
+        /* exception -- out of memory */
+        h5outOfMemory( env, "H5Gget_comment:  malloc failed");
+        return -1;
+    }
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (gName == NULL) {
+        free(gComment);
+        h5JNIFatalError( env, "H5Gget_comment:  name not pinned");
+        return -1;
+    }
+    status = H5Gget_comment((hid_t)loc_id, gName, (size_t)bufsize, gComment);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    if (status >= 0)
+    {
+        /*  may throw OutOfMemoryError */
+        str = ENVPTR->NewStringUTF(ENVPAR gComment);
+        if (str == NULL) {
+            free(gComment);
+            h5JNIFatalError( env, "H5Gget_comment:  return string not allocated");
+            return -1;
+        }
+        /*  The SetObjectArrayElement may raise exceptions */
+        ENVPTR->SetObjectArrayElement(ENVPAR comment,0,(jobject)str);
+        free(gComment);
+    }
+    else {
+        free(gComment);
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+
+/***************************************************************
+ *                   New APIs for HDF5.1.8                    *
+ ***************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_num_objs
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1num_1objs
+  (JNIEnv *env, jclass clss, jint loc_id, jlongArray num_obj)
+{
+    int status;
+    jlong *num_objP;
+    jboolean isCopy;
+    hsize_t *num_obja;
+    int i;
+    int rank;
+
+    if (num_obj == NULL) {
+        h5nullArgument( env, "H5Gget_num_objs:  num_obj is NULL");
+        return -1;
+    }
+
+    num_objP = ENVPTR->GetLongArrayElements(ENVPAR num_obj,&isCopy);
+    if (num_objP == NULL) {
+        h5JNIFatalError(env,  "H5Gget_num_objs:  num_obj not pinned");
+        return -1;
+    }
+    rank = (int) ENVPTR->GetArrayLength(ENVPAR num_obj);
+    num_obja = (hsize_t *)malloc( rank * sizeof(hsize_t));
+    if (num_obja == NULL)  {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR num_obj,num_objP,JNI_ABORT);
+        h5JNIFatalError(env,  "H5Gget_num_objs:  num_obj not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Gget_num_objs(loc_id, (hsize_t *)num_obja);
+
+    if (status < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR num_obj,num_objP,JNI_ABORT);
+        free(num_obja);
+        h5libraryError(env);
+        return -1;
+    }
+    for (i = 0; i < rank; i++) {
+        num_objP[i] = num_obja[i];
+    }
+    ENVPTR->ReleaseLongArrayElements(ENVPAR num_obj,num_objP,0);
+
+    free(num_obja);
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objname_by_idx
+ * Signature: (IJ[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1objname_1by_1idx
+  (JNIEnv *env, jclass clss, jint group_id, jlong idx,
+          jobjectArray name, jlong buf_size)
+{
+    char *aName;
+    jstring str;
+    hssize_t size;
+    long bs;
+
+    bs = (long)buf_size;
+    if (bs <= 0) {
+        h5badArgument( env, "H5Gget_objname_by_idx:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory(env, "H5Gget_objname_by_idx:  malloc failed");
+        return -1;
+    }
+    size = H5Gget_objname_by_idx((hid_t)group_id, (hsize_t)idx, aName, (size_t)buf_size);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        /*  exception, returns immediately */
+        return -1;
+    }
+    /* successful return -- save the string; */
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+    if (str == NULL) {
+        free(aName);
+        h5JNIFatalError( env,"H5Gget_objname_by_idx:  return string failed");
+        return -1;
+    }
+    free(aName);
+    /*  Note: throws ArrayIndexOutOfBoundsException,
+        ArrayStoreException */
+    ENVPTR->SetObjectArrayElement(ENVPAR name,0,str);
+
+    return (jlong)size;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objtype_by_idx
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1objtype_1by_1idx
+  (JNIEnv *env, jclass clss, jint group_id, jlong idx)
+{
+    int type;
+
+    type = H5Gget_objtype_by_idx((hid_t)group_id, (hsize_t)idx );
+    if (type < 0) {
+        h5libraryError(env);
+        /*  exception, returns immediately */
+        return -1;
+    }
+
+    return (jint)type;
+}
+
+/*
+/////////////////////////////////////////////////////////////////////////////////
+//
+//
+// Add these methods so that we don't need to call H5Gget_objtype_by_idx
+// in a loop to get information for all the objects in a group, which takes
+// a long time when the group contains more than 10,000 objects
+//
+/////////////////////////////////////////////////////////////////////////////////
+*/
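+/*
+ * Illustrative contrast (not library code): the per-object loop this
+ * replaces would issue one HDF5 call per member,
+ *
+ *     for (i = 0; i < n; i++)
+ *         otype[i] = H5Gget_objtype_by_idx(gid, (hsize_t)i);
+ *
+ * whereas H5Gget_obj_info_full below fills every output array in a single
+ * H5Literate pass over the group.
+ */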
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_obj_info_full
+ * Signature: (ILjava/lang/String;[Ljava/lang/String;[I[I[J[JIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1obj_1info_1full
+  (JNIEnv *env, jclass clss, jint loc_id, jstring group_name,
+  jobjectArray objName, jintArray oType, jintArray lType, jlongArray fNo,
+  jlongArray oRef, jint n, jint indx_type, jint indx_order)
+{
+    herr_t ret_val = -1;
+    char *gName=NULL;
+    char **oName=NULL;
+    jboolean isCopy;
+    jstring str;
+    jint *otarr;
+    jint *ltarr;
+    jlong *refP;
+    jlong *fnoP;
+    unsigned long *refs=NULL;
+    unsigned long *fnos=NULL;
+    int i;
+    int gid = loc_id;
+    int indexType = indx_type;
+    int indexOrder = indx_order;
+
+    if (group_name != NULL) {
+        gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR group_name,&isCopy);
+        if (gName == NULL) {
+            h5JNIFatalError( env, "H5Gget_obj_info_full:  name not pinned");
+            return -1;
+        }
+        gid = H5Gopen2(loc_id, gName, H5P_DEFAULT);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR group_name,gName);
+
+        if(gid < 0) {
+            h5JNIFatalError( env, "H5Gget_obj_info_full: could not get group identifier");
+            return -1;
+        }
+    }
+
+    if (oType == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_full:  oType is NULL");
+        return -1;
+    }
+
+    if (lType == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_full:  lType is NULL");
+        return -1;
+    }
+
+    if (oRef == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_full:  oRef is NULL");
+        return -1;
+    }
+
+    if (fNo == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_full:  fNo is NULL");
+        return -1;
+    }
+
+    otarr = ENVPTR->GetIntArrayElements(ENVPAR oType,&isCopy);
+    if (otarr == NULL) {
+        h5JNIFatalError( env, "H5Gget_obj_info_full:  otype not pinned");
+        return -1;
+    }
+
+    ltarr = ENVPTR->GetIntArrayElements(ENVPAR lType,&isCopy);
+    if (ltarr == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        h5JNIFatalError( env, "H5Gget_obj_info_full:  ltype not pinned");
+        return -1;
+    }
+
+    refP = ENVPTR->GetLongArrayElements(ENVPAR oRef,&isCopy);
+    if (refP == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        h5JNIFatalError( env, "H5Gget_obj_info_full:  oRef not pinned");
+        return -1;
+    }
+    fnoP = ENVPTR->GetLongArrayElements(ENVPAR fNo,&isCopy);
+    if (fnoP == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR oRef,refP,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        h5JNIFatalError( env, "H5Gget_obj_info_full:  fNo not pinned");
+        return -1;
+    }
+
+    oName = (char **)calloc(n, sizeof (*oName));
+    if (!oName)
+      goto error;
+
+    refs = (unsigned long *)calloc(n, sizeof (unsigned long));
+    fnos = (unsigned long *)calloc(n, sizeof (unsigned long));
+    if (!refs || !fnos)
+      goto error;
+
+    ret_val = H5Gget_obj_info_full( (hid_t) gid, oName, (int *)otarr, (int *)ltarr, fnos, refs, indexType, indexOrder);
+
+    if (ret_val < 0)
+        goto error;
+
+    if (refs) {
+        for (i=0; i<n; i++) {
+            refP[i] = (jlong) refs[i];
+        }
+    }
+
+    if (fnos) {
+        for (i=0; i<n; i++) {
+            fnoP[i] = (jlong) fnos[i];
+        }
+    }
+
+    if (oName) {
+        for (i=0; i<n; i++) {
+            if (*(oName+i)) {
+                str = ENVPTR->NewStringUTF(ENVPAR *(oName+i));
+                ENVPTR->SetObjectArrayElement(ENVPAR objName,i,(jobject)str);
+            }
+        } /* for (i=0; i<n; i++)*/
+    }
+
+    if (group_name != NULL) H5Gclose(gid);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR oRef,refP,0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR fNo,fnoP,0);
+    if (oName) h5str_array_free(oName, n);
+    if (refs) free(refs);
+    if (fnos) free(fnos);
+
+    return ret_val;
+
+error:
+    if (group_name != NULL) H5Gclose(gid);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,JNI_ABORT);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR oRef,refP,JNI_ABORT);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR fNo,fnoP,JNI_ABORT);
+    if (oName) h5str_array_free(oName, n);
+    if (refs) free(refs);
+    if (fnos) free(fnos);
+    h5libraryError(env);
+
+  return -1;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_obj_info_max
+ * Signature: (I[Ljava/lang/String;[I[I[JII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1obj_1info_1max
+  (JNIEnv *env, jclass clss, jint loc_id, jobjectArray objName,
+          jintArray oType, jintArray lType, jlongArray oRef,
+          int maxnum, int n)
+{
+    herr_t ret_val = -1;
+    char **oName=NULL;
+    jboolean isCopy;
+    jstring str;
+    jint *otarr;
+    jint *ltarr;
+    jlong *refP;
+    unsigned long *refs;
+    int i;
+
+    if (oType == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_max:  oType is NULL");
+        return -1;
+    }
+
+    if (lType == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_max:  lType is NULL");
+        return -1;
+    }
+
+    if (oRef == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_all:  oRef is NULL");
+        return -1;
+    }
+
+    otarr = ENVPTR->GetIntArrayElements(ENVPAR oType,&isCopy);
+    if (otarr == NULL) {
+        h5JNIFatalError( env, "H5Gget_obj_info_max:  otype not pinned");
+        return -1;
+    }
+
+    ltarr = ENVPTR->GetIntArrayElements(ENVPAR lType,&isCopy);
+    if (ltarr == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        h5JNIFatalError( env, "H5Gget_obj_info_max:  ltype not pinned");
+        return -1;
+    }
+
+    refP = ENVPTR->GetLongArrayElements(ENVPAR oRef,&isCopy);
+    if (refP == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,JNI_ABORT);
+        h5JNIFatalError( env, "H5Gget_obj_info_all:  type not pinned");
+        return -1;
+    }
+
+    oName = (char **)calloc(n, sizeof (*oName));
+    refs = (unsigned long *)calloc(n, sizeof (unsigned long));
+    if (oName == NULL || refs == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR oRef,refP,JNI_ABORT);
+        if (oName) h5str_array_free(oName, n);
+        if (refs) free(refs);
+        h5outOfMemory( env, "H5Gget_obj_info_max:  calloc failed");
+        return -1;
+    }
+
+    ret_val = H5Gget_obj_info_max( (hid_t) loc_id, oName, (int *)otarr, (int *)ltarr, refs, maxnum );
+
+    if (ret_val < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR oRef,refP,JNI_ABORT);
+        h5str_array_free(oName, n);
+        free(refs);
+        h5libraryError(env);
+        return -1;
+    }
+
+    /* release with mode 0 (copy back), not JNI_ABORT: otarr and ltarr now
+     * hold the results that the Java caller expects to see */
+    ENVPTR->ReleaseIntArrayElements(ENVPAR lType,ltarr,0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR oType,otarr,0);
+
+    if (refs) {
+        for (i=0; i<n; i++) {
+            refP[i] = (jlong) refs[i];
+        }
+    }
+    free(refs);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR oRef,refP,0);
+
+    if (oName) {
+        for (i=0; i<n; i++) {
+            if (*(oName+i)) {
+                str = ENVPTR->NewStringUTF(ENVPAR *(oName+i));
+                ENVPTR->SetObjectArrayElement(ENVPAR objName,i,(jobject)str);
+            }
+        } /* for (i=0; i<n; i++)*/
+    }
+
+    h5str_array_free(oName, n);
+
+    return ret_val;
+}
+
+int H5Gget_obj_info_full( hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *fno, unsigned long *objno, int indexType, int indexOrder)
+{
+    info_all_t info;
+    info.objname = objname;
+    info.otype = otype;
+    info.ltype = ltype;
+    info.idxnum = 0;
+    info.fno = fno;
+    info.objno = objno;
+    info.count = 0;
+
+    if(H5Literate(loc_id, (H5_index_t)indexType, (H5_iter_order_t)indexOrder, NULL, obj_info_all, (void *)&info) < 0){
+
+        /* iterate failed, try normal alphabetical order */
+        if(H5Literate(loc_id, H5_INDEX_NAME, H5_ITER_INC, NULL, obj_info_all, (void *)&info) < 0)
+            return -1;
+    }
+
+    return info.count;
+}
+
+int H5Gget_obj_info_max( hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *objno, int maxnum )
+{
+    info_all_t info;
+    info.objname = objname;
+    info.otype = otype;
+    info.ltype = ltype;
+    info.idxnum = maxnum;
+    info.objno = objno;
+    info.count = 0;
+
+    if(H5Lvisit(loc_id, H5_INDEX_NAME, H5_ITER_NATIVE, obj_info_max, (void *)&info) < 0)
+        return -1;
+
+    return info.count;
+}
+
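+/* H5Literate callback: invoked once per link in the group; it records the
+ * link's name, object type, link type, file number and address into the
+ * arrays passed through op_data, then advances the shared count. */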
+herr_t obj_info_all(hid_t loc_id, const char *name, const H5L_info_t *info, void *op_data)
+{
+    herr_t retVal = -1;
+    info_all_t* datainfo = (info_all_t*)op_data;
+    H5O_info_t object_info;
+
+    retVal = H5Oget_info_by_name(loc_id, name, &object_info, H5P_DEFAULT);
+
+    if ( retVal < 0) {
+        *(datainfo->otype+datainfo->count) = -1;
+        *(datainfo->ltype+datainfo->count) = -1;
+        *(datainfo->objname+datainfo->count) = (char *) malloc(strlen(name)+1);
+        strcpy(*(datainfo->objname+datainfo->count), name);
+        *(datainfo->objno+datainfo->count) = -1;
+    }
+    else {
+        *(datainfo->otype+datainfo->count) = object_info.type;
+        *(datainfo->ltype+datainfo->count) = info->type;
+        *(datainfo->objname+datainfo->count) = (char *) malloc(strlen(name)+1);
+        strcpy(*(datainfo->objname+datainfo->count), name);
+
+        *(datainfo->fno+datainfo->count) = object_info.fileno;
+        *(datainfo->objno+datainfo->count) = (unsigned long)object_info.addr;
+        /*
+        if(info->type==H5L_TYPE_HARD)
+            *(datainfo->objno+datainfo->count) = (unsigned long)info->u.address;
+        else
+            *(datainfo->objno+datainfo->count) = info->u.val_size;
+        */
+    }
+
+    datainfo->count++;
+
+    return 0;
+}
+
+herr_t obj_info_max(hid_t loc_id, const char *name, const H5L_info_t *info, void *op_data)
+{
+    herr_t retVal = 0;
+    info_all_t* datainfo = (info_all_t*)op_data;
+    H5O_info_t object_info;
+
+    retVal = H5Oget_info(loc_id, &object_info);
+    if ( retVal < 0) {
+        *(datainfo->otype+datainfo->count) = -1;
+        *(datainfo->ltype+datainfo->count) = -1;
+        *(datainfo->objname+datainfo->count) = NULL;
+        *(datainfo->objno+datainfo->count) = -1;
+        return 1;
+    }
+    else {
+        *(datainfo->otype+datainfo->count) = object_info.type;
+        *(datainfo->ltype+datainfo->count) = info->type;
+        /* this will be freed by h5str_array_free(oName, n)*/
+        *(datainfo->objname+datainfo->count) = (char *) malloc(strlen(name)+1);
+        strcpy(*(datainfo->objname+datainfo->count), name);
+        if(info->type==H5L_TYPE_HARD)
+            *(datainfo->objno+datainfo->count) = (unsigned long)info->u.address;
+        else
+            *(datainfo->objno+datainfo->count) = info->u.val_size;
+    }
+    datainfo->count++;
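+    /* H5Lvisit stops iterating when the callback returns a positive value,
+     * so returning 1 below caps the traversal at idxnum objects */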
+    if(datainfo->count < (int)datainfo->idxnum)
+        return 0;
+    else
+        return 1;
+}
+
+/*
+ * Create a java object of hdf.h5.structs.H5G_info_t
+ * public class H5G_info_t {
+ *   public H5G_STORAGE_TYPE  storage_type; // Type of storage for links in group
+ *   public long     nlinks;       // Number of links in group
+ *   public long     max_corder;   // Current max. creation order value for group
+ *   public int      mounted;      // Whether group has a file mounted on it
+ * }
+ *
+ */
+jobject create_H5G_info_t(JNIEnv *env, H5G_info_t group_info)
+{
+    jclass cls;
+    jboolean jmounted;
+    jint storage_type;
+    jobject obj;
+    jfieldID fid_storage_type, fid_nlinks, fid_max_corder, fid_mounted;
+
+    cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5G_info_t");
+    if (cls == NULL) return NULL;
+
+    obj = ENVPTR->AllocObject(ENVPAR cls);
+    if (obj == NULL) return NULL;
+
+    fid_storage_type = ENVPTR->GetFieldID(ENVPAR cls, "storage_type", "I");
+    fid_nlinks = ENVPTR->GetFieldID(ENVPAR cls, "nlinks", "J");
+    fid_max_corder = ENVPTR->GetFieldID(ENVPAR cls, "max_corder", "J");
+    fid_mounted = ENVPTR->GetFieldID(ENVPAR cls, "mounted", "Z");
+
+    if (fid_storage_type==NULL || fid_nlinks==NULL || fid_max_corder==NULL ||
+            fid_mounted == NULL)
+        return NULL;
+
+    jmounted = (group_info.mounted==0) ? JNI_FALSE : JNI_TRUE;
+    storage_type = (jint)group_info.storage_type;
+
+    ENVPTR->SetIntField(ENVPAR obj, fid_storage_type, (jint)storage_type);
+    ENVPTR->SetLongField(ENVPAR obj, fid_nlinks, (jlong)group_info.nlinks);
+    ENVPTR->SetLongField(ENVPAR obj, fid_max_corder, (jlong)group_info.max_corder);
+    ENVPTR->SetBooleanField(ENVPAR obj, fid_mounted, jmounted);
+
+    return obj;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gcreate2
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gcreate2
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name,
+          jint link_plist_id, jint create_plist_id, jint access_plist_id)
+{
+    hid_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gcreate:  name is NULL");
+        return -1;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gcreate:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Gcreate2((hid_t)loc_id, gName, link_plist_id, create_plist_id, access_plist_id );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gcreate_anon
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gcreate_1anon
+  (JNIEnv *env, jclass cls, jint loc_id, jint gcpl_id, jint gapl_id)
+{
+    hid_t ret_val;
+
+    ret_val = H5Gcreate_anon((hid_t)loc_id, (hid_t)gcpl_id, (hid_t)gapl_id);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+    }
+    return (jint)ret_val;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gopen2
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gopen2
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist_id)
+{
+    hid_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gopen:  name is NULL");
+        return -1;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gopen:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Gopen2((hid_t)loc_id, gName, (hid_t)access_plist_id );
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1create_1plist
+(JNIEnv *env, jclass cls, jint loc_id)
+{
+  hid_t ret_val;
+
+  ret_val = H5Gget_create_plist((hid_t)loc_id);
+
+  if (ret_val < 0) {
+      h5libraryError(env);
+  }
+
+  return (jint)ret_val;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_info
+ * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1info
+  (JNIEnv *env, jclass cls, jint loc_id)
+{
+    H5G_info_t group_info;
+    herr_t ret_val = -1;
+
+    ret_val = H5Gget_info( (hid_t) loc_id, &group_info);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+        return NULL;
+    }
+
+    return create_H5G_info_t(env, group_info);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_info_by_name
+ * Signature: (ILjava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1info_1by_1name
+  (JNIEnv *env, jclass cls, jint loc_id, jstring name, jint lapl_id)
+{
+    H5G_info_t group_info;
+    herr_t ret_val = -1;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_info_by_name:  name is NULL");
+        return NULL;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gget_info_by_name:  file name not pinned");
+        return NULL;
+    }
+
+    ret_val = H5Gget_info_by_name((hid_t)loc_id, gName, &group_info, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+        return NULL;
+    }
+
+    return create_H5G_info_t(env, group_info);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_info_by_idx
+ * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1info_1by_1idx
+  (JNIEnv *env, jclass cls, jint loc_id, jstring name, jint index_type,
+          jint order, jlong n, jint lapl_id)
+{
+    H5G_info_t group_info;
+    herr_t ret_val = -1;
+    char* gName;
+    jboolean isCopy;
+    H5_index_t cindex_type = (H5_index_t)index_type;
+    H5_iter_order_t corder = (H5_iter_order_t)order;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_info_by_idx:  name is NULL");
+        return NULL;
+    }
+
+    gName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gget_info_by_idx:  file name not pinned");
+        return NULL;
+    }
+
+    ret_val = H5Gget_info_by_idx((hid_t)loc_id, gName, cindex_type,
+            corder, (hsize_t)n, &group_info, (hid_t)lapl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,gName);
+
+    if (ret_val < 0) {
+        h5libraryError(env);
+        return NULL;
+    }
+
+    return create_H5G_info_t(env, group_info);
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5gImp.h b/source/c/hdf-java/h5gImp.h
new file mode 100644
index 0000000..7c6351c
--- /dev/null
+++ b/source/c/hdf-java/h5gImp.h
@@ -0,0 +1,192 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5G */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5G
+#define _Included_ncsa_hdf_hdf5lib_H5_H5G
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gclose
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gcreate2
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gcreate2
+  (JNIEnv *, jclass, jint, jstring, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gcreate_anon
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gcreate_1anon
+  (JNIEnv *, jclass, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gopen2
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gopen2
+  (JNIEnv *, jclass, jint, jstring, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Glink2
+ * Signature: (ILjava/lang/String;IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Glink2
+  (JNIEnv *env, jclass clss, 
+    jint current_loc_id, jstring current_name, jint link_type, 
+    jint new_loc_id, jstring new_name);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gunlink
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gunlink
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_comment
+ * Signature: (ILjava/lang/String;I[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1comment
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint bufsize,
+  jobjectArray comment);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gset_comment
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gset_1comment
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jstring comment);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_linkval
+ * Signature: (ILjava/lang/String;I[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1linkval
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint size, 
+  jobjectArray value);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gmove
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gmove
+  (JNIEnv *env, jclass clss, jint loc_id, jstring src, jstring dst);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objinfo
+ * Signature: (ILjava/lang/String;Z[J[J[I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1objinfo
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jboolean follow_link,
+  jlongArray fileno, jlongArray objno, jintArray link_info, jlongArray mtime);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_num_objs
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1num_1objs
+  (JNIEnv *env, jclass clss, jint loc_id, jlongArray num_obj);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1create_1plist
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_info
+ * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1info
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_info_by_name
+ * Signature: (ILjava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1info_1by_1name
+  (JNIEnv *, jclass, jint, jstring, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_info_by_idx
+ * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1info_1by_1idx
+  (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gcreate
+ * Signature: (ILjava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gcreate
+  (JNIEnv *, jclass, jint, jstring, jlong);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Gopen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Gopen
+  (JNIEnv *, jclass, jint, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Glink
+ * Signature: (IILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Glink
+  (JNIEnv *env, jclass clss, jint loc_id, jint link_type, jstring
+    current_name, jstring new_name);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_obj_info_full
+ * Signature: (ILjava/lang/String;[Ljava/lang/String;[I[I[JIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1obj_1info_1full
+  (JNIEnv *env, jclass clss, jint loc_id, jstring group_name, 
+  jobjectArray objName, jintArray oType, jintArray lType, jlongArray fNo, 
+  jlongArray oRef, jint n, jint indx_type, jint indx_order);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_obj_info_max
+ * Signature: (I[Ljava/lang/String;[I[I[JII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Gget_1obj_1info_1max
+  (JNIEnv *env, jclass clss, jint loc_id, jobjectArray objName, 
+          jintArray oType, jintArray lType, jlongArray oRef, 
+          int maxnum, int n);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5iImp.c b/source/c/hdf-java/h5iImp.c
new file mode 100755
index 0000000..1bfd0b2
--- /dev/null
+++ b/source/c/hdf-java/h5iImp.c
@@ -0,0 +1,223 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Identifier API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <stdlib.h>
+#include <jni.h>
+#include "h5jni.h"
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Iget_1type
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    H5I_type_t retVal = H5I_BADID;
+    retVal =  H5Iget_type((hid_t)obj_id);
+    if (retVal == H5I_BADID) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
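+
+/*
+ * Native usage sketch: H5Iget_type classifies any hid_t, so a caller can
+ * branch on the kind of object behind an identifier, e.g.
+ *
+ *     if (H5Iget_type(some_id) == H5I_GROUP)
+ *         printf("some_id refers to a group\n");
+ *
+ * (some_id is a placeholder for any valid identifier.)
+ */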
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.2 versus release 1.6.1          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_name
+ * Signature: (ILjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Iget_1name
+  (JNIEnv *env, jclass clss, jint obj_id, jobjectArray name, jlong buf_size)
+{
+    char *aName;
+    jstring str;
+    hssize_t size;
+    long bs;
+
+    bs = (long)buf_size;
+    if (bs <= 0) {
+        h5badArgument( env, "H5Iget_name:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Iget_name:  malloc failed");
+        return -1;
+    }
+
+    size = H5Iget_name((hid_t)obj_id, aName, (size_t)buf_size);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;
+        /*  exception, returns immediately */
+    }
+    /* successful return -- save the string; */
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+    ENVPTR->SetObjectArrayElement(ENVPAR name,0,str);
+
+    free(aName);
+    return (jlong)size;
+}
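+
+/*
+ * Note: unlike most wrappers in this library, the buffer size here is
+ * supplied by the Java caller.  Natively, H5Iget_name also supports the
+ * usual HDF5 two-call idiom, sketched below with a hypothetical obj_id:
+ *
+ *     ssize_t len = H5Iget_name(obj_id, NULL, 0);
+ *     char   *buf = (char *)malloc((size_t)len + 1);
+ *     if (buf != NULL)
+ *         H5Iget_name(obj_id, buf, (size_t)len + 1);
+ *
+ * The first call passes a NULL buffer and returns only the name length.
+ */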
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Iget_1ref
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    int retVal = -1;
+    retVal = H5Iget_ref( (hid_t)obj_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iinc_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Iinc_1ref
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    int retVal = -1;
+    retVal = H5Iinc_ref( (hid_t)obj_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Idec_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Idec_1ref
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    int retVal = -1;
+    retVal = H5Idec_ref( (hid_t)obj_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_file_id
+ * Signature: (I)I
+ */
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Iget_1file_1id
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    hid_t file_id = 0;
+
+    file_id = H5Iget_file_id ((hid_t) obj_id);
+
+    if (file_id < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) file_id;
+}
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.8.0                               *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Iget_1type_1ref
+  (JNIEnv *env, jclass clss, jint type)
+{
+    int retVal = -1;
+
+    retVal = H5Iget_type_ref((H5I_type_t)type);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Inmembers
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Inmembers
+  (JNIEnv *env, jclass clss, jint type)
+{
+    herr_t retVal = -1;
+    hsize_t num_members = 0;
+
+    retVal = H5Inmembers((H5I_type_t)type, &num_members);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)num_members;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5jni.h b/source/c/hdf-java/h5jni.h
new file mode 100755
index 0000000..89e7acb
--- /dev/null
+++ b/source/c/hdf-java/h5jni.h
@@ -0,0 +1,53 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include "H5version.h"
+#include <string.h>
+
+#ifndef _Included_h5jni
+#define _Included_h5jni
+
+#ifdef __cplusplus
+#define ENVPTR (env)
+#define ENVPAR
+#define ENVONLY
+#else
+#define ENVPTR (*env)
+#define ENVPAR env,
+#define ENVONLY env
+#endif
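+
+/*
+ * These macros paper over the difference between the C and C++ JNI
+ * invocation styles: in C a JNIEnv is a pointer to a function table and
+ * every call repeats env as the first argument, while in C++ it is an
+ * object with member functions.  For example,
+ *
+ *     ENVPTR->NewStringUTF(ENVPAR "abc")
+ *
+ * expands to (*env)->NewStringUTF(env, "abc") in C and to
+ * env->NewStringUTF("abc") in C++.
+ */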
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+extern jboolean h5JNIFatalError(JNIEnv *, char *);
+extern jboolean h5nullArgument(JNIEnv *, char *);
+extern jboolean h5badArgument (JNIEnv *, char *);
+extern jboolean h5outOfMemory (JNIEnv *, char *);
+extern jboolean h5libraryError(JNIEnv *env );
+extern jboolean h5raiseException(JNIEnv *, char *, char *);
+extern jboolean h5unimplemented( JNIEnv *env, char *functName);
+
+/* implemented in H5.c */
+extern jint get_enum_value(JNIEnv *env, jobject enum_obj);
+extern jobject get_enum_object(JNIEnv *env, const char* enum_class_name,
+    jint enum_val, const char* enum_field_desc);
+
+/* implemented in h5gImp.c */
+extern jobject create_H5G_info_t(JNIEnv *env, H5G_info_t group_info);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/source/c/hdf-java/h5lImp.c b/source/c/hdf-java/h5lImp.c
new file mode 100755
index 0000000..601a85e
--- /dev/null
+++ b/source/c/hdf-java/h5lImp.c
@@ -0,0 +1,1037 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Link Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5lImp.h"
+
+#ifdef __cplusplus
+#define CBENVPTR (cbenv)
+#define CBENVPAR 
+#define JVMPTR (jvm)
+#define JVMPAR 
+#define JVMPAR2 
+#else
+#define CBENVPTR (*cbenv)
+#define CBENVPAR cbenv,
+#define JVMPTR (*jvm)
+#define JVMPAR jvm
+#define JVMPAR2 jvm,
+#endif
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcopy
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcopy
+      (JNIEnv *env, jclass clss, jint cur_loc_id, jstring cur_name, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+    {
+        char    *lCurName;
+        char    *lDstName;
+        jboolean isCopy;
+        herr_t   status = -1;
+        
+        if (cur_name == NULL) {
+            h5nullArgument(env, "H5Lcopy:  cur_name is NULL");
+            return;
+        }
+        
+        lCurName = (char*)ENVPTR->GetStringUTFChars(ENVPAR cur_name, &isCopy);
+        if (lCurName == NULL) {
+            h5JNIFatalError(env, "H5Lcopy:  cur_name not pinned");
+            return;
+        }
+        
+        if (dst_name == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5nullArgument(env, "H5Lcopy:  dst_name is NULL");
+            return;
+        }
+        
+        lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name, &isCopy);
+        if (lDstName == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5JNIFatalError(env, "H5Lcopy:  dst_name not pinned");
+            return;
+        }
+
+        status = H5Lcopy((hid_t)cur_loc_id, (const char*)lCurName, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+        
+        return;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcreate_external
+     * Signature: (Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcreate_1external
+    (JNIEnv *env, jclass clss, jstring file_name, jstring cur_name, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+  {
+      char    *lFileName;
+      char    *lCurName;
+      char    *lDstName;
+      jboolean isCopy;
+      herr_t   status = -1;
+      
+      if (file_name == NULL) {
+          h5nullArgument(env, "H5Lcreate_external:  file_name is NULL");
+          return;
+      }
+      
+      lFileName = (char*)ENVPTR->GetStringUTFChars(ENVPAR file_name, &isCopy);
+      if (lFileName == NULL) {
+          h5JNIFatalError(env, "H5Lcreate_external:  file_name not pinned");
+          return;
+      }
+     
+      if (cur_name == NULL) {
+          ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, lFileName);
+          h5nullArgument(env, "H5Lcreate_external:  cur_name is NULL");
+          return;
+      }
+      
+      lCurName = (char*)ENVPTR->GetStringUTFChars(ENVPAR cur_name,&isCopy);
+      if (lCurName == NULL) {
+          ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, lFileName);
+          h5JNIFatalError(env, "H5Lcreate_external:  cur_name not pinned");
+          return;
+      }
+      
+      if (dst_name == NULL) {
+          ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, lFileName);
+          ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+          h5nullArgument(env, "H5Lcreate_external:  dst_name is NULL");
+          return;
+      }
+      
+      lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name, &isCopy);
+      if (lDstName == NULL) {
+          ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, lFileName);
+          ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+          h5JNIFatalError(env, "H5Lcreate_external:  dst_name not pinned");
+          return;
+      }
+
+      status = H5Lcreate_external((const char*)lFileName, (const char*)lCurName, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+      ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, lFileName);
+      ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+      ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+      
+      if (status < 0) {
+         h5libraryError(env);
+         return;
+      }
+      
+      return;
+  }
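+
+    /*
+     * Note on the error paths above: each GetStringUTFChars call pins a
+     * Java string until it is matched by ReleaseStringUTFChars, so every
+     * early return first releases whatever has already been pinned.  The
+     * same pin/release discipline is used throughout this file.
+     */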
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcreate_hard
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcreate_1hard
+      (JNIEnv *env, jclass clss, jint cur_loc_id, jstring cur_name, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+    {
+        char    *lCurName;
+        char    *lDstName;
+        jboolean isCopy;
+        herr_t   status = -1;
+        
+        if (cur_name == NULL) {
+            h5nullArgument(env, "H5Lcreate_hard:  cur_name is NULL");
+            return;
+        }
+        
+        lCurName = (char*)ENVPTR->GetStringUTFChars(ENVPAR cur_name, &isCopy);
+        if (lCurName == NULL) {
+            h5JNIFatalError(env, "H5Lcreate_hard:  cur_name not pinned");
+            return;
+        }
+        
+        if (dst_name == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5nullArgument(env, "H5Lcreate_hard:  dst_name is NULL");
+            return;
+        }
+        
+        lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name, &isCopy);
+        if (lDstName == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5JNIFatalError(env, "H5Lcreate_hard:  dst_name not pinned");
+            return;
+        }
+
+        status = H5Lcreate_hard((hid_t)cur_loc_id, (const char*)lCurName, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+        
+        return;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcreate_soft
+     * Signature: (Ljava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcreate_1soft
+      (JNIEnv *env, jclass clss, jstring cur_name, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+    {
+        char    *lCurName;
+        char    *lDstName;
+        jboolean isCopy;
+        herr_t   status = -1;
+        
+        if (cur_name == NULL) {
+            h5nullArgument(env, "H5Lcreate_soft:  cur_name is NULL");
+            return;
+        }
+        
+        lCurName = (char*)ENVPTR->GetStringUTFChars(ENVPAR cur_name, &isCopy);
+        if (lCurName == NULL) {
+            h5JNIFatalError(env, "H5Lcreate_soft:  cur_name not pinned");
+            return;
+        }
+        
+        if (dst_name == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5nullArgument(env, "H5Lcreate_soft:  dst_name is NULL");
+            return;
+        }
+        
+        lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name,&isCopy);
+        if (lDstName == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5JNIFatalError(env, "H5Lcreate_soft:  dst_name not pinned");
+            return;
+        }
+
+        status = H5Lcreate_soft((const char*)lCurName, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+        
+        return;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ldelete
+     * Signature: (ILjava/lang/String;I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ldelete
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_id)
+    {
+        char    *lName;
+        jboolean isCopy;
+        herr_t   status = -1;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Ldelete:  name is NULL");
+            return;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Ldelete:  name not pinned");
+            return;
+        }
+        
+        status = H5Ldelete((hid_t)loc_id, (const char*)lName, (hid_t)access_id);
+        
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        
+        if (status < 0) {
+            h5libraryError(env);
+            return;
+        }
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ldelete_by_idx
+     * Signature: (ILjava/lang/String;IIJI)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ldelete_1by_1idx
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint index_field, jint order, jlong link_n, jint access_id)
+    {
+        char      *lName;
+        herr_t     status;
+        jboolean   isCopy;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Ldelete_by_idx:  name is NULL");
+            return;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Ldelete_by_idx:  name not pinned");
+            return;
+        }
+
+        status = H5Ldelete_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lexists
+     * Signature: (ILjava/lang/String;I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lexists
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_id)
+    {
+        char    *lName;
+        jboolean isCopy;
+        htri_t   bval = 0;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lexists:  name is NULL");
+            return JNI_FALSE;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lexists:  name not pinned");
+            return JNI_FALSE;
+        }
+        
+        bval = H5Lexists((hid_t)loc_id, (const char*)lName, (hid_t)access_id);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        
+        if (bval > 0) {
+            return JNI_TRUE;
+        }
+        else if (bval == 0) {
+            return JNI_FALSE;
+        }
+        else {
+            h5libraryError(env);
+            return JNI_FALSE;
+        }
+    }
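+
+    /*
+     * The htri_t returned by H5Lexists is tri-state: positive means the
+     * link exists, zero means it does not, negative means the check itself
+     * failed.  A native caller follows the same pattern ("dset1" is just a
+     * placeholder link name):
+     *
+     *     htri_t rc = H5Lexists(loc_id, "dset1", H5P_DEFAULT);
+     *     if (rc > 0)       puts("link exists");
+     *     else if (rc == 0) puts("no such link");
+     *     else              puts("H5Lexists failed");
+     */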
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_info
+     * Signature: (ILjava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5L_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1info
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_id)
+    {
+        char      *lName;
+        herr_t     status;
+        H5L_info_t infobuf;
+        jboolean   isCopy;
+        jclass     cls;
+        jmethodID  constructor;
+        jvalue     args[5];
+        jobject    ret_info_t = NULL;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lget_info:  name is NULL");
+            return NULL;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lget_info:  name not pinned");
+            return NULL;
+        }
+
+        status = H5Lget_info((hid_t)loc_id, (const char*)lName, (H5L_info_t*)&infobuf, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+
+        if (status < 0) {
+           h5libraryError(env);
+           return NULL;
+        }
+
+        /* look up the Java H5L_info_t class */
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5L_info_t");
+        if (cls == 0) {
+            h5JNIFatalError(env, "JNI error: FindClass failed\n");
+            return NULL;
+        }
+        /* get a reference to the constructor; the name is <init> */
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(IZJIJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError(env, "JNI error: GetMethodID failed\n");
+            return NULL;
+        }
+        args[0].i = infobuf.type;
+        args[1].z = infobuf.corder_valid;
+        args[2].j = infobuf.corder;
+        args[3].i = infobuf.cset;
+        if(infobuf.type==0)
+            args[4].j = infobuf.u.address;
+        else
+            args[4].j = infobuf.u.val_size;
+        ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+        return ret_info_t;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_info_by_idx
+     * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5L_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint index_field, jint order, jlong link_n, jint access_id)
+    {
+        char      *lName;
+        herr_t     status;
+        H5L_info_t infobuf;
+        jboolean   isCopy;
+        jclass     cls;
+        jmethodID  constructor;
+        jvalue     args[5];
+        jobject    ret_info_t = NULL;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lget_info_by_idx:  name is NULL");
+            return NULL;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lget_info_by_idx:  name not pinned");
+            return NULL;
+        }
+
+        status = H5Lget_info_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (H5L_info_t*)&infobuf, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+
+        if (status < 0) {
+           h5libraryError(env);
+           return NULL;
+        }
+
+        /* look up the Java H5L_info_t class */
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5L_info_t");
+        if (cls == 0) {
+            h5JNIFatalError(env, "JNI error: FindClass failed\n");
+            return NULL;
+        }
+        /* get a reference to the constructor; the name is <init> */
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(IZJIJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError(env, "JNI error: GetMethodID failed\n");
+            return NULL;
+        }
+        args[0].i = infobuf.type;
+        args[1].z = infobuf.corder_valid;
+        args[2].j = infobuf.corder;
+        args[3].i = infobuf.cset;
+        if(infobuf.type==0)
+            args[4].j = infobuf.u.address;
+        else
+            args[4].j = infobuf.u.val_size;
+        ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+        return ret_info_t;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_name_by_idx
+     * Signature: (ILjava/lang/String;IIJI)Ljava/lang/String;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint index_field, jint order, jlong link_n, jint access_id)
+    {
+        size_t   buf_size;
+        char    *lName;
+        char    *lValue;
+        jboolean isCopy;
+        jlong    status_size;
+        jstring  str = NULL;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lget_name_by_idx:  name is NULL");
+            return NULL;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lget_name_by_idx:  name not pinned");
+            return NULL;
+        }
+
+        /* get the length of the link name */
+        status_size = H5Lget_name_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (char*)NULL, (size_t)0, (hid_t)H5P_DEFAULT);
+        if(status_size < 0) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5libraryError(env);
+            return NULL;
+        }
+        buf_size = (size_t)status_size + 1;  /* add extra space for the null terminator */
+        
+        lValue = (char*)malloc(sizeof(char) * buf_size);
+        if (lValue == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5outOfMemory(env, "H5Lget_name_by_idx:  malloc failed ");
+            return NULL;
+        }
+
+        status_size = H5Lget_name_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (char*)lValue, (size_t)buf_size, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+
+        if (status_size < 0) {
+            free(lValue);
+            h5libraryError(env);
+            return NULL;
+        }
+        /* may throw OutOfMemoryError */
+        str = ENVPTR->NewStringUTF(ENVPAR lValue);
+        if (str == NULL) {
+            /* exception -- fatal JNI error */
+            free(lValue);
+            h5JNIFatalError(env, "H5Lget_name_by_idx:  return string not created");
+            return NULL;
+        }
+
+        free(lValue);
+
+        return str;
+    }
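+
+    /*
+     * The wrapper above uses the standard HDF5 two-call idiom: the first
+     * H5Lget_name_by_idx call passes a NULL buffer and returns only the
+     * name length, the buffer is then sized to length + 1 for the null
+     * terminator, and a second call fills it in.  Most variable-length
+     * string queries in the HDF5 C API work the same way.
+     */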
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_val
+     * Signature: (ILjava/lang/String;[Ljava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1val
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jobjectArray link_value, jint access_id)
+    {
+        size_t      buf_size;
+        herr_t      status;
+        H5L_info_t  infobuf;
+        char       *lName;
+        char       *lValue;
+        const char *file_name;
+        const char *obj_name;
+        jboolean    isCopy;
+        jstring     str;
+        
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lget_val:  name is NULL");
+            return -1;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lget_val:  name not pinned");
+            return -1;
+        }
+
+        /* get the length of the link val */
+        status = H5Lget_info((hid_t)loc_id, (const char*)lName, (H5L_info_t*)&infobuf, (hid_t)H5P_DEFAULT);
+        if(status < 0) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5libraryError(env);
+            return -1;
+        }
+        buf_size = infobuf.u.val_size + 1;  /* add extra space for the null terminator */
+        
+        if(infobuf.type == H5L_TYPE_HARD) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5JNIFatalError(env, "H5Lget_val:  link is hard type");
+            return -1;
+        }
+        
+        lValue = (char*)malloc(sizeof(char) * buf_size);
+        if (lValue == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5outOfMemory(env, "H5Lget_val:  malloc failed");
+            return -1;
+        }
+
+        status = H5Lget_val((hid_t)loc_id, (const char*)lName, (void*)lValue, (size_t)buf_size, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        if (status < 0) {
+           free(lValue);
+           h5libraryError(env);
+           return -1;
+        }
+        /* may throw OutOfMemoryError */
+        if(infobuf.type == H5L_TYPE_EXTERNAL) {
+            status = H5Lunpack_elink_val((char*)lValue, (size_t)infobuf.u.val_size, (unsigned*)NULL, (const char**)&file_name, (const char**)&obj_name);
+            if (status < 0) {
+               free(lValue);
+               h5libraryError(env);
+               return -1;
+            }
+            
+            str = ENVPTR->NewStringUTF(ENVPAR obj_name);
+            if (str == NULL) {
+                /* exception -- fatal JNI error */
+                free(lValue);
+                h5JNIFatalError(env, "H5Lget_val:  return string not created");
+                return -1;
+            }
+            ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+            
+            str = ENVPTR->NewStringUTF(ENVPAR file_name);
+            if (str == NULL) {
+                /* exception -- fatal JNI error */
+                free(lValue);
+                h5JNIFatalError(env, "H5Lget_val:  return string not created");
+                return -1;
+            }
+            ENVPTR->SetObjectArrayElement(ENVPAR link_value, 1, str);
+        }
+        else {
+            str = ENVPTR->NewStringUTF(ENVPAR lValue);
+            if (str == NULL) {
+                /* exception -- fatal JNI error */
+                free(lValue);
+                h5JNIFatalError(env, "H5Lget_val:  return string not created");
+                return -1;
+            }
+            ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+        }
+
+        free(lValue);
+
+        return infobuf.type;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_val_by_idx
+     * Signature: (ILjava/lang/String;IIJ[Ljava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1val_1by_1idx
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint index_field, jint order, 
+            jlong link_n, jobjectArray link_value, jint access_id)
+    {
+        herr_t      status;
+        size_t      buf_size;
+        H5L_info_t  infobuf;
+        char       *lName;
+        void       *lValue;
+        const char *file_name;
+        const char *obj_name;
+        jboolean    isCopy;
+        jstring     str;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lget_val_by_idx:  name is NULL");
+            return -1;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lget_val_by_idx:  name not pinned");
+            return -1;
+        }
+
+        /* get the length of the link value */
+        status = H5Lget_info_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (H5L_info_t*)&infobuf, (hid_t)access_id);
+        if(status < 0) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5libraryError(env);
+            return -1;
+        }
+        buf_size = infobuf.u.val_size;
+        /* buf_size is a size_t and thus unsigned, so the original
+           (buf_size < 0) test could never fire; guard against a zero
+           size instead before calling malloc */
+        if (buf_size == 0) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5libraryError(env);
+            return -1;
+        }
+        lValue = (void*)malloc(buf_size);
+        if (lValue == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5outOfMemory(env, "H5Lget_val_by_idx:  malloc failed ");
+            return -1;
+        }
+
+        status = H5Lget_val_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (void*)lValue, (size_t)buf_size, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        if (status < 0) {
+           free(lValue);
+           h5libraryError(env);
+           return -1;
+        }
+        /* may throw OutOfMemoryError */
+        if(infobuf.type == H5L_TYPE_EXTERNAL) {
+            status = H5Lunpack_elink_val((char*)lValue, (size_t)infobuf.u.val_size, (unsigned*)NULL, (const char**)&file_name, (const char**)&obj_name);
+            if (status < 0) {
+               free(lValue);
+               h5libraryError(env);
+               return -1;
+            }
+            
+            str = ENVPTR->NewStringUTF(ENVPAR obj_name);
+            if (str == NULL) {
+                /* exception -- fatal JNI error */
+                free(lValue);
+                h5JNIFatalError(env, "H5Lget_val_by_idx:  return string not created");
+                return -1;
+            }
+            ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+            
+            str = ENVPTR->NewStringUTF(ENVPAR file_name);
+            if (str == NULL) {
+                /* exception -- fatal JNI error */
+                free(lValue);
+                h5JNIFatalError(env, "H5Lget_val_by_idx:  return string not created");
+                return -1;
+            }
+            ENVPTR->SetObjectArrayElement(ENVPAR link_value, 1, str);
+        }
+        else {
+            str = ENVPTR->NewStringUTF(ENVPAR (char *)lValue);
+            if (str == NULL) {
+                /* exception -- fatal JNI error */
+                free(lValue);
+                h5JNIFatalError(env, "H5Lget_val_by_idx:  return string not created");
+                return -1;
+            }
+            ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+        }
+
+        free(lValue);
+
+        return infobuf.type;
+    }
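+
+    /*
+     * Note for both H5Lget_val variants above: for external links the raw
+     * link value is a packed buffer, and H5Lunpack_elink_val returns
+     * pointers into that buffer rather than copies.  That is why lValue
+     * must stay allocated until the file and object names have been copied
+     * into Java strings, and is freed only afterwards.
+     */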
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lmove
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lmove
+      (JNIEnv *env, jclass clss, jint cur_loc_id, jstring cur_name, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+    {
+        char    *lCurName;
+        char    *lDstName;
+        jboolean isCopy;
+        herr_t   status = -1;
+        
+        if (cur_name == NULL) {
+            h5nullArgument(env, "H5Lcreate_hard:  cur_name is NULL");
+            return;
+        }
+        
+        lCurName = (char*)ENVPTR->GetStringUTFChars(ENVPAR cur_name, &isCopy);
+        if (lCurName == NULL) {
+            h5JNIFatalError(env, "H5Lcreate_hard:  cur_name not pinned");
+            return;
+        }
+        
+        if (dst_name == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5nullArgument(env, "H5Lcreate_hard:  dst_name is NULL");
+            return;
+        }
+        
+        lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name, &isCopy);
+        if (lDstName == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5JNIFatalError( env, "H5Lcreate_hard:  dst_name not pinned");
+            return;
+        }
+
+        status = H5Lmove((hid_t)cur_loc_id, (const char*)lCurName, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+        
+        return;
+    }
+
+    herr_t H5L_iterate_cb(hid_t g_id, const char *name, const H5L_info_t *info, void *op_data) {
+        JNIEnv    *cbenv;
+        jint       status;
+        jclass     cls;
+        jmethodID  mid;
+        jstring    str;
+        jmethodID  constructor;
+        jvalue     args[5];
+        jobject    cb_info_t = NULL;
+
+        if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) != 0) {
+            /* printf("JNI H5L_iterate_cb error: AttachCurrentThread failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+        if (cls == 0) {
+            /* printf("JNI H5L_iterate_cb error: GetObjectClass failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(ILjava/lang/String;Lncsa/hdf/hdf5lib/structs/H5L_info_t;Lncsa/hdf/hdf5lib/callbacks/H5L_iterate_t;)I");
+        if (mid == 0) {
+            /* printf("JNI H5L_iterate_cb error: GetMethodID failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        str = CBENVPTR->NewStringUTF(CBENVPAR name);
+
+        /* look up the Java H5L_info_t class */
+        cls = CBENVPTR->FindClass(CBENVPAR "ncsa/hdf/hdf5lib/structs/H5L_info_t");
+        if (cls == 0) {
+            /* printf("JNI H5L_iterate_cb error: FindClass failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        /* get a reference to the constructor; the name is <init> */
+        constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(IZJIJ)V");
+        if (constructor == 0) {
+            /* printf("JNI H5L_iterate_cb error: GetMethodID constructor failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        args[0].i = info->type;
+        args[1].z = info->corder_valid;
+        args[2].j = info->corder;
+        args[3].i = info->cset;
+        if(info->type==0)
+            args[4].j = info->u.address;
+        else
+            args[4].j = info->u.val_size;
+        cb_info_t = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+        status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, g_id, str, cb_info_t, op_data);
+
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return status;
+    }
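+
+    /*
+     * H5L_iterate_cb runs on whatever native thread HDF5 invokes it from,
+     * so it cannot reuse the JNIEnv captured at call time; instead it
+     * attaches the current thread to the JVM, builds the H5L_info_t
+     * argument, calls back into Java, and detaches again.  The JavaVM
+     * pointer (jvm) and the callback object (visit_callback) are stored
+     * in globals by the H5Lvisit/H5Literate wrappers below before
+     * iteration starts.
+     */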
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lvisit
+     * Signature: (IIILjava/lang/Object;Ljava/lang/Object;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lvisit
+      (JNIEnv *env, jclass clss, jint grp_id, jint idx_type, jint order,
+              jobject callback_op, jobject op_data)
+    {
+        herr_t        status = -1;
+        
+        ENVPTR->GetJavaVM(ENVPAR &jvm);
+        visit_callback = callback_op;
+
+        if (op_data == NULL) {
+            h5nullArgument(env, "H5Lvisit:  op_data is NULL");
+            return -1;
+        }
+        if (callback_op == NULL) {
+            h5nullArgument(env, "H5Lvisit:  callback_op is NULL");
+            return -1;
+        }
+        
+        status = H5Lvisit((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return status;
+        }
+        
+        return status;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lvisit_by_name
+     * Signature: (ILjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lvisit_1by_1name
+      (JNIEnv *env, jclass clss, jint grp_id, jstring name, jint idx_type, jint order,
+              jobject callback_op, jobject op_data, jint access_id)
+    {
+        jboolean      isCopy;
+        char         *lName;
+        herr_t        status = -1;
+        
+        ENVPTR->GetJavaVM(ENVPAR &jvm);
+        visit_callback = callback_op;
+        
+        if (name == NULL) {
+            h5nullArgument(env, "H5Lvisit_by_name:  name is NULL");
+            return -1;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Lvisit_by_name:  name not pinned");
+            return -1;
+        }
+
+        if (op_data == NULL) {
+            h5nullArgument(env, "H5Lvisit_by_name:  op_data is NULL");
+            return -1;
+        }
+        if (callback_op == NULL) {
+            h5nullArgument(env, "H5Lvisit_by_name:  callback_op is NULL");
+            return -1;
+        }
+        
+        status = H5Lvisit_by_name((hid_t)grp_id, (const char*)lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return status;
+        }
+        
+        return status;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Literate
+     * Signature: (IIIJLjava/lang/Object;Ljava/lang/Object;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Literate
+      (JNIEnv *env, jclass clss, jint grp_id, jint idx_type, jint order,
+              jlong idx, jobject callback_op, jobject op_data)
+    {
+        hsize_t       start_idx = (hsize_t)idx;
+        herr_t        status = -1;
+        
+        ENVPTR->GetJavaVM(ENVPAR &jvm);
+        visit_callback = callback_op;
+
+        if (op_data == NULL) {
+            h5nullArgument(env,  "H5Literate:  op_data is NULL");
+            return -1;
+        }
+        if (callback_op == NULL) {
+            h5nullArgument(env,  "H5Literate:  callback_op is NULL");
+            return -1;
+        }
+        
+        status = H5Literate((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return status;
+        }
+        
+        return status;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Literate_by_name
+     * Signature: (ILjava/lang/String;IIJLjava/lang/Object;Ljava/lang/Object;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Literate_1by_1name
+      (JNIEnv *env, jclass clss, jint grp_id, jstring name, jint idx_type, jint order,
+              jlong idx, jobject callback_op, jobject op_data, jint access_id)
+    {
+        jboolean      isCopy;
+        char         *lName;
+        hsize_t       start_idx = (hsize_t)idx;
+        herr_t        status = -1;
+        
+        ENVPTR->GetJavaVM(ENVPAR &jvm);
+        visit_callback = callback_op;
+        
+        if (name == NULL) {
+            h5nullArgument(env, "H5Literate_by_name:  name is NULL");
+            return -1;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Literate_by_name:  name not pinned");
+            return -1;
+        }
+
+        if (op_data == NULL) {
+            h5nullArgument(env,  "H5Literate_by_name:  op_data is NULL");
+            return -1;
+        }
+        if (callback_op == NULL) {
+            h5nullArgument(env,  "H5Literate_by_name:  callback_op is NULL");
+            return -1;
+        }
+        
+        status = H5Literate_by_name((hid_t)grp_id, (const char*)lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return status;
+        }
+        
+        return status;
+    }
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5lImp.h b/source/c/hdf-java/h5lImp.h
new file mode 100644
index 0000000..69046ed
--- /dev/null
+++ b/source/c/hdf-java/h5lImp.h
@@ -0,0 +1,153 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5L */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5L
+#define _Included_ncsa_hdf_hdf5lib_H5_H5L
+#ifdef __cplusplus
+extern "C" {
+#endif
+    
+    extern JavaVM *jvm;
+    extern jobject visit_callback;   
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcopy
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcopy
+      (JNIEnv *, jclass, jint, jstring, jint, jstring, jint, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcreate_external
+     * Signature: (Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcreate_1external
+      (JNIEnv *, jclass, jstring, jstring, jint, jstring, jint, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcreate_hard
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcreate_1hard
+      (JNIEnv *, jclass, jint, jstring, jint, jstring, jint, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lcreate_soft
+     * Signature: (Ljava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lcreate_1soft
+      (JNIEnv *, jclass, jstring, jint, jstring, jint, jint);
+  
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ldelete
+     * Signature: (ILjava/lang/String;I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ldelete
+      (JNIEnv *, jclass, jint, jstring, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ldelete_by_idx
+     * Signature: (ILjava/lang/String;IIJI)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ldelete_1by_1idx
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lexists
+     * Signature: (ILjava/lang/String;I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lexists
+      (JNIEnv *, jclass, jint, jstring, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_info
+     * Signature: (ILjava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5L_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1info
+      (JNIEnv *, jclass, jint, jstring, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_info_by_idx
+     * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5L_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_name_by_idx
+     * Signature: (ILjava/lang/String;IIJI)Ljava/lang/String;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_val
+     * Signature: (ILjava/lang/String;[Ljava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1val
+      (JNIEnv *, jclass, jint, jstring, jobjectArray, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lget_val_by_idx
+     * Signature: (ILjava/lang/String;IIJ[Ljava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lget_1val_1by_1idx
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jobjectArray, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lmove
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lmove
+      (JNIEnv *, jclass, jint, jstring, jint, jstring, jint, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lvisit
+     * Signature: (IIILjava/lang/Object;Ljava/lang/Object;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lvisit
+      (JNIEnv *, jclass, jint, jint, jint, jobject, jobject);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Lvisit_by_name
+     * Signature: (ILjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Lvisit_1by_1name
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jobject, jobject, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Literate
+     * Signature: (IIIJLjava/lang/Object;Ljava/lang/Object;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Literate
+      (JNIEnv *, jclass, jint, jint, jint, jlong, jobject, jobject);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Literate_by_name
+     * Signature: (ILjava/lang/String;IIJLjava/lang/Object;Ljava/lang/Object;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Literate_1by_1name
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jobject, jobject, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5oImp.c b/source/c/hdf-java/h5oImp.c
new file mode 100755
index 0000000..a3feed0
--- /dev/null
+++ b/source/c/hdf-java/h5oImp.c
@@ -0,0 +1,891 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
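+/*
+ *  Naming note (illustrative): JNI derives each C entry point below from the
+ *  fully qualified Java native method name, escaping '_' in the Java name as
+ *  "_1".  A Java declaration along the lines of
+ *
+ *      synchronized static native int _H5Oopen(int loc_id, String name, int access_plist_id);
+ *
+ *  in class ncsa.hdf.hdf5lib.H5 therefore resolves to the C function
+ *  Java_ncsa_hdf_hdf5lib_H5__1H5Oopen defined in this file.
+ */
+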
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5oImp.h"
+
+#ifdef __cplusplus
+#define CBENVPTR (cbenv)
+#define CBENVPAR 
+#define JVMPTR (jvm)
+#define JVMPAR 
+#define JVMPAR2 
+#else
+#define CBENVPTR (*cbenv)
+#define CBENVPAR cbenv,
+#define JVMPTR (*jvm)
+#define JVMPAR jvm
+#define JVMPAR2 jvm,
+#endif
+
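+/*
+ *  What the macros above expand to (for illustration): in C the JNIEnv is a
+ *  pointer to a function table, so a callback-environment call reads
+ *
+ *      (*cbenv)->GetObjectClass(cbenv, visit_callback);
+ *
+ *  whereas compiled as C++ the same call is simply
+ *
+ *      cbenv->GetObjectClass(visit_callback);
+ *
+ *  CBENVPTR/CBENVPAR (and JVMPTR/JVMPAR for the JavaVM) hide that difference.
+ */
+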
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    _H5Oopen
+     * Signature: (ILjava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Oopen
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist_id)
+    {
+        hid_t    status;
+        char*    oName;
+        jboolean isCopy;
+
+        if (name == NULL) {
+            h5nullArgument( env, "H5Oopen:  name is NULL");
+            return -1;
+        }
+
+        oName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+
+        if (oName == NULL) {
+            h5JNIFatalError( env, "H5Oopen:  object name not pinned");
+            return -1;
+        }
+
+        status = H5Oopen((hid_t)loc_id, oName, (hid_t)access_plist_id );
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+        if (status < 0) {
+            h5libraryError(env);
+        }
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    _H5Oclose
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Oclose
+      (JNIEnv *env, jclass clss, jint object_id)
+    {
+        herr_t retVal =  H5Oclose((hid_t)object_id) ;
+
+        if (retVal < 0) {
+            h5libraryError(env);
+        }
+
+        return (jint)retVal;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ocopy
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ocopy
+      (JNIEnv *env, jclass clss, jint cur_loc_id, jstring cur_name, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+    {
+        char    *lCurName;
+        char    *lDstName;
+        jboolean isCopy;
+        herr_t   status = -1;
+        
+        if (cur_name == NULL) {
+            h5nullArgument(env, "H5Ocopy:  cur_name is NULL");
+            return;
+        }
+        
+        lCurName = (char*)ENVPTR->GetStringUTFChars(ENVPAR cur_name, &isCopy);
+        if (lCurName == NULL) {
+            h5JNIFatalError(env, "H5Ocopy:  cur_name not pinned");
+            return;
+        }
+        
+        if (dst_name == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5nullArgument(env, "H5Ocopy:  dst_name is NULL");
+            return;
+        }
+        
+        lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name, &isCopy);
+        if (lDstName == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+            h5JNIFatalError(env, "H5Ocopy:  dst_name not pinned");
+            return;
+        }
+
+        status = H5Ocopy((hid_t)cur_loc_id, (const char*)lCurName, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR cur_name, lCurName);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+        
+        return;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_info
+     * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5O_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1info
+    (JNIEnv *env, jclass clss, jint loc_id)
+    {
+        herr_t      status;
+        H5O_info_t  infobuf;
+        jclass      cls;
+        jmethodID   constructor;
+        jvalue      args[12];
+        jobject     hdrinfobuf;
+        jobject     ihinfobuf1;
+        jobject     ihinfobuf2;
+        jobject     ret_info_t = NULL;
+
+        status = H5Oget_info((hid_t)loc_id, (H5O_info_t*)&infobuf);
+
+        if (status < 0) {
+           h5libraryError(env);
+           return NULL;
+        }
+
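+        /* The blocks below follow one JNI construction idiom, repeated for
+         * each returned struct: look up the Java class with FindClass,
+         * resolve its constructor with GetMethodID(cls, "<init>", sig),
+         * fill a jvalue array from the C struct fields, and instantiate
+         * with NewObjectA.  The header and index-heap objects are then
+         * nested into the H5O_info_t handed back to Java. */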
+        // get a reference to the H5_hdr_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5O_hdr_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(IIIIJJJJJJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5O_hdr_info_t failed\n");
+           return NULL;
+        }
+        args[0].i = infobuf.hdr.version;
+        args[1].i = infobuf.hdr.nmesgs;
+        args[2].i = infobuf.hdr.nchunks;
+        args[3].i = infobuf.hdr.flags;
+        args[4].j = infobuf.hdr.space.total;
+        args[5].j = infobuf.hdr.space.meta;
+        args[6].j = infobuf.hdr.space.mesg;
+        args[7].j = infobuf.hdr.space.free;
+        args[8].j = infobuf.hdr.mesg.present;
+        args[9].j = infobuf.hdr.mesg.shared;
+        hdrinfobuf = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        // get a reference to the H5_ih_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5_ih_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5_ih_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(JJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5_ih_info_t failed\n");
+           return NULL;
+        }
+        args[0].j = infobuf.meta_size.obj.index_size;
+        args[1].j = infobuf.meta_size.obj.heap_size;
+        ihinfobuf1 = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+        args[0].j = infobuf.meta_size.attr.index_size;
+        args[1].j = infobuf.meta_size.attr.heap_size;
+        ihinfobuf2 = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        // get a reference to the H5O_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5O_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5O_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(JJIIJJJJJLncsa/hdf/hdf5lib/structs/H5O_hdr_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5O_info_t failed\n");
+           return NULL;
+        }
+        args[0].j = infobuf.fileno;
+        args[1].j = infobuf.addr;
+        args[2].i = infobuf.type;
+        args[3].i = infobuf.rc;
+        args[4].j = infobuf.num_attrs;
+        args[5].j = infobuf.atime;
+        args[6].j = infobuf.mtime;
+        args[7].j = infobuf.ctime;
+        args[8].j = infobuf.btime;
+        args[9].l = hdrinfobuf;
+        args[10].l = ihinfobuf1;
+        args[11].l = ihinfobuf2;
+        ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        return ret_info_t;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_info_by_name
+     * Signature: (ILjava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5O_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1info_1by_1name
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_id)
+    {
+        char       *lName;
+        herr_t      status;
+        H5O_info_t  infobuf;
+        jboolean    isCopy;
+        jclass      cls;
+        jmethodID   constructor;
+        jvalue      args[12];
+        jobject     hdrinfobuf;
+        jobject     ihinfobuf1;
+        jobject     ihinfobuf2;
+        jobject     ret_info_t = NULL;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Oget_info_by_name:  name is NULL");
+            return NULL;
+        }
+
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Oget_info_by_name:  name not pinned");
+            return NULL;
+        }
+
+        status = H5Oget_info_by_name((hid_t)loc_id, (const char*)lName, (H5O_info_t*)&infobuf, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+
+        if (status < 0) {
+           h5libraryError(env);
+           return NULL;
+        }
+
+        // get a reference to the H5_hdr_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5O_hdr_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(IIIIJJJJJJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5O_hdr_info_t failed\n");
+           return NULL;
+        }
+        args[0].i = infobuf.hdr.version;
+        args[1].i = infobuf.hdr.nmesgs;
+        args[2].i = infobuf.hdr.nchunks;
+        args[3].i = infobuf.hdr.flags;
+        args[4].j = infobuf.hdr.space.total;
+        args[5].j = infobuf.hdr.space.meta;
+        args[6].j = infobuf.hdr.space.mesg;
+        args[7].j = infobuf.hdr.space.free;
+        args[8].j = infobuf.hdr.mesg.present;
+        args[9].j = infobuf.hdr.mesg.shared;
+        hdrinfobuf = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        // get a reference to the H5_ih_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5_ih_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5_ih_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(JJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5_ih_info_t failed\n");
+           return NULL;
+        }
+        args[0].j = infobuf.meta_size.obj.index_size;
+        args[1].j = infobuf.meta_size.obj.heap_size;
+        ihinfobuf1 = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+        args[0].j = infobuf.meta_size.attr.index_size;
+        args[1].j = infobuf.meta_size.attr.heap_size;
+        ihinfobuf2 = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        // get a reference to the H5O_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5O_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5O_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(JJIIJJJJJLncsa/hdf/hdf5lib/structs/H5O_hdr_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5O_info_t failed\n");
+           return NULL;
+        }
+        args[0].j = infobuf.fileno;
+        args[1].j = infobuf.addr;
+        args[2].i = infobuf.type;
+        args[3].i = infobuf.rc;
+        args[4].j = infobuf.num_attrs;
+        args[5].j = infobuf.atime;
+        args[6].j = infobuf.mtime;
+        args[7].j = infobuf.ctime;
+        args[8].j = infobuf.btime;
+        args[9].l = hdrinfobuf;
+        args[10].l = ihinfobuf1;
+        args[11].l = ihinfobuf2;
+        ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        return ret_info_t;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_info_by_idx
+     * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5O_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1info_1by_1idx
+    (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint index_field, jint order, jlong link_n, jint access_id)
+    {
+        char       *lName;
+        herr_t      status;
+        H5O_info_t  infobuf;
+        jboolean    isCopy;
+        jclass      cls;
+        jmethodID   constructor;
+        jvalue      args[12];
+        jobject     hdrinfobuf;
+        jobject     ihinfobuf1;
+        jobject     ihinfobuf2;
+        jobject     ret_info_t = NULL;
+
+        if (name == NULL) {
+            h5nullArgument(env, "H5Oget_info_by_idx:  name is NULL");
+            return NULL;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Oget_info_by_idx:  name not pinned");
+            return NULL;
+        }
+
+        status = H5Oget_info_by_idx((hid_t)loc_id, (const char*)lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (H5O_info_t*)&infobuf, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+
+        if (status < 0) {
+           h5libraryError(env);
+           return NULL;
+        }
+
+        // get a reference to the H5_hdr_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5O_hdr_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(IIIIJJJJJJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5O_hdr_info_t failed\n");
+           return NULL;
+        }
+        args[0].i = infobuf.hdr.version;
+        args[1].i = infobuf.hdr.nmesgs;
+        args[2].i = infobuf.hdr.nchunks;
+        args[3].i = infobuf.hdr.flags;
+        args[4].j = infobuf.hdr.space.total;
+        args[5].j = infobuf.hdr.space.meta;
+        args[6].j = infobuf.hdr.space.mesg;
+        args[7].j = infobuf.hdr.space.free;
+        args[8].j = infobuf.hdr.mesg.present;
+        args[9].j = infobuf.hdr.mesg.shared;
+        hdrinfobuf = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        // get a reference to the H5_ih_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5_ih_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5_ih_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(JJ)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5_ih_info_t failed\n");
+           return NULL;
+        }
+        args[0].j = infobuf.meta_size.obj.index_size;
+        args[1].j = infobuf.meta_size.obj.heap_size;
+        ihinfobuf1 = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+        args[0].j = infobuf.meta_size.attr.index_size;
+        args[1].j = infobuf.meta_size.attr.heap_size;
+        ihinfobuf2 = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        // get a reference to the H5O_info_t class
+        cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5O_info_t");
+        if (cls == 0) {
+           h5JNIFatalError( env, "JNI error: GetObjectClass H5O_info_t failed\n");
+           return NULL;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(JJIIJJJJJLncsa/hdf/hdf5lib/structs/H5O_hdr_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;)V");
+        if (constructor == 0) {
+            h5JNIFatalError( env, "JNI error: GetMethodID H5O_info_t failed\n");
+           return NULL;
+        }
+        args[0].j = infobuf.fileno;
+        args[1].j = infobuf.addr;
+        args[2].i = infobuf.type;
+        args[3].i = infobuf.rc;
+        args[4].j = infobuf.num_attrs;
+        args[5].j = infobuf.atime;
+        args[6].j = infobuf.mtime;
+        args[7].j = infobuf.ctime;
+        args[8].j = infobuf.btime;
+        args[9].l = hdrinfobuf;
+        args[10].l = ihinfobuf1;
+        args[11].l = ihinfobuf2;
+        ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+
+        return ret_info_t;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Olink
+     * Signature: (IILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Olink
+      (JNIEnv *env, jclass clss, jint cur_loc_id, jint dst_loc_id, jstring dst_name, jint create_id, jint access_id)
+    {
+        char    *lDstName;
+        jboolean isCopy;
+        herr_t   status = -1;
+        
+        if (dst_name == NULL) {
+            h5nullArgument(env, "H5Ocreate_hard:  dst_name is NULL");
+            return;
+        }
+        
+        lDstName = (char*)ENVPTR->GetStringUTFChars(ENVPAR dst_name, &isCopy);
+        if (lDstName == NULL) {
+            h5JNIFatalError( env, "H5Ocreate_hard:  dst_name not pinned");
+            return;
+        }
+
+        status = H5Olink((hid_t)cur_loc_id, (hid_t)dst_loc_id, (const char*)lDstName, (hid_t)create_id, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR dst_name, lDstName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return;
+        }
+        
+        return;
+    }
+
+    herr_t H5O_iterate_cb(hid_t g_id, const char *name, const H5O_info_t *info, void *op_data) {
+        JNIEnv    *cbenv;
+        jint       status;
+        jclass     cls;
+        jmethodID  mid;
+        jstring    str;
+        jmethodID  constructor;
+        jvalue     args[12];
+        jobject    hdrinfobuf;
+        jobject    ihinfobuf1;
+        jobject    ihinfobuf2;
+        jobject    cb_info_t = NULL;
+
+        if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) != 0) {
+            /* printf("JNI H5O_iterate_cb error: AttachCurrentThread failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+        if (cls == 0) {
+            /* printf("JNI H5O_iterate_cb error: GetObjectClass failed\n"); */
+           JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(ILjava/lang/String;Lncsa/hdf/hdf5lib/structs/H5O_info_t;Lncsa/hdf/hdf5lib/callbacks/H5O_iterate_t;)I");
+        if (mid == 0) {
+            /* printf("JNI H5O_iterate_cb error: GetMethodID failed\n"); */
+            JVMPTR->DetachCurrentThread(JVMPAR);
+            return -1;
+        }
+        str = CBENVPTR->NewStringUTF(CBENVPAR name);
+
+        // get a reference to the H5_hdr_info_t class
+        cls = CBENVPTR->FindClass(CBENVPAR "ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t");
+        if (cls == 0) {
+            JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(IIIIJJJJJJ)V");
+        if (constructor == 0) {
+            JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        args[0].i = info->hdr.version;
+        args[1].i = info->hdr.nmesgs;
+        args[2].i = info->hdr.nchunks;
+        args[3].i = info->hdr.flags;
+        args[4].j = info->hdr.space.total;
+        args[5].j = info->hdr.space.meta;
+        args[6].j = info->hdr.space.mesg;
+        args[7].j = info->hdr.space.free;
+        args[8].j = info->hdr.mesg.present;
+        args[9].j = info->hdr.mesg.shared;
+        hdrinfobuf = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+        // get a reference to the H5_ih_info_t class
+        cls = CBENVPTR->FindClass(CBENVPAR "ncsa/hdf/hdf5lib/structs/H5_ih_info_t");
+        if (cls == 0) {
+            JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(JJ)V");
+        if (constructor == 0) {
+            JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        args[0].j = info->meta_size.obj.index_size;
+        args[1].j = info->meta_size.obj.heap_size;
+        ihinfobuf1 = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+        args[0].j = info->meta_size.attr.index_size;
+        args[1].j = info->meta_size.attr.heap_size;
+        ihinfobuf2 = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+        // get a reference to the H5O_info_t class
+        cls = CBENVPTR->FindClass(CBENVPAR "ncsa/hdf/hdf5lib/structs/H5O_info_t");
+        if (cls == 0) {
+            JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        // get a reference to the constructor; the name is <init>
+        constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(JJIIJJJJJLncsa/hdf/hdf5lib/structs/H5O_hdr_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;Lncsa/hdf/hdf5lib/structs/H5_ih_info_t;)V");
+        if (constructor == 0) {
+            JVMPTR->DetachCurrentThread(JVMPAR);
+           return -1;
+        }
+        args[0].j = info->fileno;
+        args[1].j = info->addr;
+        args[2].i = info->type;
+        args[3].i = info->rc;
+        args[4].j = info->num_attrs;
+        args[5].j = info->atime;
+        args[6].j = info->mtime;
+        args[7].j = info->ctime;
+        args[8].j = info->btime;
+        args[9].l = hdrinfobuf;
+        args[10].l = ihinfobuf1;
+        args[11].l = ihinfobuf2;
+        cb_info_t = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+        status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, g_id, str, cb_info_t, op_data);
+
+        JVMPTR->DetachCurrentThread(JVMPAR);
+        return status;
+    }
+    
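+    /*
+     * Java-side counterpart of H5O_iterate_cb above, as a minimal sketch
+     * (assumption: the exact interface shipped with these bindings may carry
+     * extra modifiers).  The visit_callback object must expose a method
+     * matching the GetMethodID signature used in the callback:
+     *
+     *     public interface H5O_iterate_cb extends Callbacks {
+     *         int callback(int group, String name, H5O_info_t info, H5O_iterate_t op_data);
+     *     }
+     *
+     * As with the native H5O_iterate_t contract, returning 0 continues the
+     * visit, a positive value stops it early with success, and a negative
+     * value stops it with failure.
+     */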
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ovisit
+     * Signature: (IIILjava/lang/Object;Ljava/lang/Object;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ovisit
+      (JNIEnv *env, jclass clss, jint grp_id, jint idx_type, jint order,
+              jobject callback_op, jobject op_data)
+    {
+        herr_t        status = -1;
+        
+        ENVPTR->GetJavaVM(ENVPAR &jvm);
+        visit_callback = callback_op;
+
+        if (op_data == NULL) {
+            h5nullArgument(env, "H5Ovisit:  op_data is NULL");
+            return -1;
+        }
+        if (callback_op == NULL) {
+            h5nullArgument(env, "H5Ovisit:  callback_op is NULL");
+            return -1;
+        }
+        
+        status = H5Ovisit((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5O_iterate_t)H5O_iterate_cb, (void*)op_data);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return status;
+        }
+        
+        return status;
+    }
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ovisit_by_name
+     * Signature: (ILjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ovisit_1by_1name
+      (JNIEnv *env, jclass clss, jint grp_id, jstring name, jint idx_type, jint order,
+              jobject callback_op, jobject op_data, jint access_id)
+    {
+        jboolean      isCopy;
+        char         *lName;
+        herr_t        status = -1;
+        
+        ENVPTR->GetJavaVM(ENVPAR &jvm);
+        visit_callback = callback_op;
+        
+        if (name == NULL) {
+            h5nullArgument(env, "H5Ovisit_by_name:  name is NULL");
+            return -1;
+        }
+        
+        lName = (char*)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (lName == NULL) {
+            h5JNIFatalError(env, "H5Ovisit_by_name:  name not pinned");
+            return -1;
+        }
+
+        if (op_data == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5nullArgument(env, "H5Ovisit_by_name:  op_data is NULL");
+            return -1;
+        }
+        if (callback_op == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+            h5nullArgument(env, "H5Ovisit_by_name:  callback_op is NULL");
+            return -1;
+        }
+        
+        status = H5Ovisit_by_name((hid_t)grp_id, (const char*)lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5O_iterate_t)H5O_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, lName);
+        
+        if (status < 0) {
+           h5libraryError(env);
+           return status;
+        }
+        
+        return status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oset_comment
+     * Signature: (ILjava/lang/String;)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oset_1comment
+      (JNIEnv *env, jclass clss, jint loc_id, jstring comment)
+    {
+        herr_t  status;
+        char    *oComment;
+        jboolean isCopy;
+
+        if (comment == NULL) {
+            oComment = NULL;
+        }
+        else {
+            oComment = (char *)ENVPTR->GetStringUTFChars(ENVPAR comment, &isCopy);
+            if (oComment == NULL) {
+                h5JNIFatalError( env, "H5Oset_comment:  comment not pinned");
+                return;
+            }
+        }
+
+        status = H5Oset_comment((hid_t)loc_id, (const char*)oComment);
+
+        if (comment != NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR comment, oComment);
+        }
+
+        if (status < 0) {
+            h5libraryError(env);
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oset_comment_by_name
+     * Signature: (ILjava/lang/String;Ljava/lang/String;I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oset_1comment_1by_1name
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jstring comment, jint access_id)
+    {
+        herr_t   status;
+        char    *oName;
+        char    *oComment;
+        jboolean isCopy;
+
+        if (name == NULL) {
+            h5nullArgument( env, "H5Oset_comment_by_name:  name is NULL");
+            return;
+        }
+        oName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (oName == NULL) {
+            h5JNIFatalError( env, "H5Oset_comment_by_name:  name not pinned");
+            return;
+        }
+        if (comment == NULL) {
+            oComment = NULL;
+        }
+        else {
+            oComment = (char *)ENVPTR->GetStringUTFChars(ENVPAR comment,&isCopy);
+            if (oComment == NULL) {
+                ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+                h5JNIFatalError( env, "H5Oset_comment_by_name:  comment not pinned");
+                return;
+            }
+        }
+
+        status = H5Oset_comment_by_name((hid_t)loc_id, (const char*)oName, (const char*)oComment, (hid_t)access_id);
+
+        if (comment != NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR comment, oComment);
+        }
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+
+        if (status < 0) {
+            h5libraryError(env);
+        }
+        return;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_comment
+     * Signature: (I)Ljava/lang/String;
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1comment
+      (JNIEnv *env, jclass clss, jint loc_id)
+    {
+        char   *oComment;
+        size_t  buf_size;
+        ssize_t status;
+        jstring str;
+
+        /* get the length of the comment; keep the signed return value so
+           that errors are detectable before converting to size_t */
+        status = H5Oget_comment((hid_t)loc_id, NULL, 0);
+        if (status < 0) {
+            h5badArgument( env, "H5Oget_comment:  comment length < 0");
+            return NULL;
+        }
+        if (status == 0) {
+            return NULL;
+        }
+        buf_size = (size_t)status;
+
+        buf_size++; /* add extra space for the null terminator */
+        oComment = (char *)malloc(sizeof(char)*buf_size);
+        if (oComment == NULL) {
+            /* exception -- out of memory */
+            h5outOfMemory( env, "H5Oget_comment:  malloc failed");
+            return NULL;
+        }
+
+        status = H5Oget_comment((hid_t)loc_id, (char*)oComment, (size_t)buf_size);
+
+        if (status >= 0) {
+            /*  may throw OutOfMemoryError */
+            str = ENVPTR->NewStringUTF(ENVPAR oComment);
+            free(oComment);
+            if (str == NULL) {
+                h5JNIFatalError( env, "H5Oget_comment:  return string not allocated");
+                return NULL;
+            }
+        } 
+        else {
+            free(oComment);
+            h5libraryError(env);
+            return NULL;
+        }
+
+        return (jstring)str;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_comment_by_name
+     * Signature: (ILjava/lang/String;I)Ljava/lang/String;
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1comment_1by_1name
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_id)
+    {
+        char    *oComment;
+        char    *oName;
+        size_t   buf_size;
+        ssize_t  status;
+        jstring  str;
+        jboolean isCopy;
+
+        if (name == NULL) {
+            h5nullArgument( env, "H5Oget_comment_by_name:  name is NULL");
+            return NULL;
+        }
+        oName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+        if (oName == NULL) {
+            h5JNIFatalError( env, "H5Oget_comment_by_name:  name not pinned");
+            return NULL;
+        }
+
+        /* get the length of the comment; keep the signed return value so
+           that errors are detectable before converting to size_t */
+        status = H5Oget_comment_by_name((hid_t)loc_id, (const char*)oName, NULL, 0, (hid_t)access_id);
+        if (status < 0) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+            h5badArgument( env, "H5Oget_comment_by_name:  comment length < 0");
+            return NULL;
+        }
+        if (status == 0) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+            return NULL;
+        }
+        buf_size = (size_t)status;
+
+        buf_size++; /* add extra space for the null terminator */
+        oComment = (char *)malloc(sizeof(char)*buf_size);
+        if (oComment == NULL) {
+            ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+            h5outOfMemory( env, "H5Oget_comment_by_name:  malloc failed");
+            return NULL;
+        }
+
+        status = H5Oget_comment_by_name((hid_t)loc_id, (const char*)oName, (char*)oComment, (size_t)buf_size, (hid_t)access_id);
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, oName);
+
+        if (status >= 0) {
+            /*  may throw OutOfMemoryError */
+            str = ENVPTR->NewStringUTF(ENVPAR oComment);
+            free(oComment);
+            if (str == NULL) {
+                h5JNIFatalError( env, "H5Oget_comment_by_name:  return string not allocated");
+                return NULL;
+            }
+        } 
+        else {
+            free(oComment);
+            h5libraryError(env);
+            return NULL;
+        }
+
+        return (jstring)str;
+    }
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5oImp.h b/source/c/hdf-java/h5oImp.h
new file mode 100644
index 0000000..747d4c5
--- /dev/null
+++ b/source/c/hdf-java/h5oImp.h
@@ -0,0 +1,122 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5_H5O */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5O
+#define _Included_ncsa_hdf_hdf5lib_H5_H5O
+#ifdef __cplusplus
+extern "C" {
+#endif
+    
+    extern JavaVM *jvm;
+    extern jobject visit_callback;   
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    _H5Oopen
+     * Signature: (ILjava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Oopen
+      (JNIEnv *, jclass, jint, jstring, jint);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    _H5Oclose
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Oclose
+      (JNIEnv *, jclass, jint);
+
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ocopy
+     * Signature: (ILjava/lang/String;ILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ocopy
+      (JNIEnv *, jclass, jint, jstring, jint, jstring, jint, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_info
+     * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5O_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1info
+      (JNIEnv *, jclass, jint);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_info_by_name
+     * Signature: (ILjava/lang/String;I)Lncsa/hdf/hdf5lib/structs/H5O_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1info_1by_1name
+    (JNIEnv *, jclass, jint, jstring, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_info_by_idx
+     * Signature: (ILjava/lang/String;IIJI)Lncsa/hdf/hdf5lib/structs/H5O_info_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1info_1by_1idx
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jlong, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Olink
+     * Signature: (IILjava/lang/String;II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Olink
+      (JNIEnv *, jclass, jint, jint, jstring, jint, jint);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ovisit
+     * Signature: (IIILjava/lang/Object;Ljava/lang/Object;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ovisit
+      (JNIEnv *, jclass, jint, jint, jint, jobject, jobject);
+    
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Ovisit_by_name
+     * Signature: (ILjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Ovisit_1by_1name
+      (JNIEnv *, jclass, jint, jstring, jint, jint, jobject, jobject, jint);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oset_comment
+     * Signature: (ILjava/lang/String;)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oset_1comment
+      (JNIEnv *env, jclass clss, jint loc_id, jstring comment);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oset_comment_by_name
+     * Signature: (ILjava/lang/String;Ljava/lang/String;I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oset_1comment_1by_1name
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jstring comment, jint access_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_comment
+     * Signature: (I)Ljava/lang/String;
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1comment
+      (JNIEnv *env, jclass clss, jint loc_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Oget_comment_by_name
+     * Signature: (ILjava/lang/String;I)Ljava/lang/String;
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Oget_1comment_1by_1name
+      (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_id);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5pImp.c b/source/c/hdf-java/h5pImp.c
new file mode 100755
index 0000000..7d80b34
--- /dev/null
+++ b/source/c/hdf-java/h5pImp.c
@@ -0,0 +1,4972 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Property List API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
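+/*
+ *  Illustrative Java-side usage of the wrappers in this file (a sketch; the
+ *  constant names are assumed to come from ncsa.hdf.hdf5lib.HDF5Constants):
+ *
+ *      int fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+ *      try {
+ *          H5.H5Pset_userblock(fcpl, 1024L);
+ *      } finally {
+ *          H5.H5Pclose(fcpl);
+ *      }
+ */
+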
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5pImp.h"
+#include "h5util.h"
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE (!FALSE)
+#endif
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcreate
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pcreate
+  (JNIEnv *env, jclass clss, jint type)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Pcreate((hid_t)type);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pclose
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = 0;
+
+    if (plist > 0)
+        retVal = H5Pclose((hid_t)plist);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_class
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1class
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    hid_t retVal = H5P_NO_CLASS;
+
+    retVal = H5Pget_class((hid_t) plist);
+    if (retVal == H5P_NO_CLASS) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pcopy
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Pcopy((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_version
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1version
+  (JNIEnv *env, jclass clss, jint plist, jintArray version_info)
+{
+    herr_t   status;
+    jint    *theArray;
+    jboolean isCopy;
+
+    if (version_info == NULL) {
+        h5nullArgument(env, "H5Pget_version:  version_info input array is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR version_info) < 4) {
+        h5badArgument(env, "H5Pget_version:  version_info input array < 4");
+        return -1;
+    }
+    theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR version_info,&isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_version:  version_info not pinned");
+        return -1;
+    }
+
+    status = H5Pget_version((hid_t)plist, (unsigned *)&(theArray[0]),
+            (unsigned *)&(theArray[1]), (unsigned *)&(theArray[2]), (unsigned *)&(theArray[3]));
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR version_info, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR version_info, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_userblock
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1userblock
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    long   sz;
+    herr_t retVal = -1;
+
+    sz = (long)size;
+    retVal = H5Pset_userblock((hid_t)plist, (hsize_t)sz);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_userblock
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1userblock
+  (JNIEnv *env, jclass clss, jint plist, jlongArray size)
+{
+    herr_t   status;
+    jlong   *theArray;
+    jboolean isCopy;
+    hsize_t  s;
+
+    if (size == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Pget_userblock:  size is NULL");
+        return -1;
+    }
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_userblock:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_userblock((hid_t)plist, &s);
+    if (status < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = s;
+    ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_sizes
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1sizes
+  (JNIEnv *env, jclass clss, jint plist, jint sizeof_addr, jint sizeof_size)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_sizes((hid_t)plist, (size_t)sizeof_addr, (size_t)sizeof_size);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_sizes
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1sizes
+  (JNIEnv *env, jclass clss, jint plist, jlongArray size)
+{
+    herr_t   status;
+    jlong   *theArray;
+    jboolean isCopy;
+    size_t   ss;
+    size_t   sa;
+
+    if (size == NULL) {
+        h5nullArgument(env, "H5Pget_sizes:  size is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR size) < 2) {
+        h5badArgument(env, "H5Pget_sizes:  size input array < 2 elements");
+        return -1;
+    }
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_sizes:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_sizes((hid_t)plist, &sa, &ss);
+    if (status < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = sa;
+    theArray[1] = ss;
+    ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_sym_k
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1sym_1k
+  (JNIEnv *env, jclass clss, jint plist, jint ik, jint lk)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_sym_k((hid_t)plist, (int)ik, (int)lk);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_sym_k
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1sym_1k
+  (JNIEnv *env, jclass clss, jint plist, jintArray size)
+{
+    herr_t   status;
+    jint    *theArray;
+    jboolean isCopy;
+
+    if (size == NULL) {
+        h5nullArgument(env, "H5Pget_sym_k:  size is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR size) < 2) {
+        h5badArgument(env, "H5Pget_sym_k:  size < 2 elements");
+        return -1;
+    }
+
+    theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR size, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_sym_k:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_sym_k((hid_t)plist, (unsigned *)&(theArray[0]), (unsigned *)&(theArray[1]));
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR size, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR size, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_istore_k
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1istore_1k
+  (JNIEnv *env, jclass clss, jint plist, jint ik)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_istore_k((hid_t)plist, (int)ik);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_istore_k
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1istore_1k
+  (JNIEnv *env, jclass clss, jint plist, jintArray ik)
+{
+    herr_t   status;
+    jint    *theArray;
+    jboolean isCopy;
+
+    if (ik == NULL) {
+        h5nullArgument(env, "H5Pget_store_k:  ik is NULL");
+        return -1;
+    }
+    theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR ik, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_store_k:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_istore_k((hid_t)plist, (unsigned *)&(theArray[0]));
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR ik, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR ik, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_layout
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1layout
+  (JNIEnv *env, jclass clss, jint plist, jint layout)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_layout((hid_t)plist, (H5D_layout_t)layout);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_layout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1layout
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    H5D_layout_t retVal = H5D_LAYOUT_ERROR;
+
+    retVal = H5Pget_layout((hid_t)plist);
+    if (retVal == H5D_LAYOUT_ERROR) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_chunk
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1chunk
+  (JNIEnv *env, jclass clss, jint plist, jint ndims, jbyteArray dim)
+{
+    herr_t   status;
+    jbyte   *theArray;
+    jboolean isCopy;
+    hsize_t *da;
+    hsize_t *lp;
+    jlong   *jlp;
+    int      i;
+    int      rank;
+
+    if (dim == NULL) {
+        h5nullArgument(env, "H5Pset_chunk:  dim array is NULL");
+        return -1;
+    }
+    i = ENVPTR->GetArrayLength(ENVPAR dim);
+    rank = i / sizeof(jlong);
+    if (rank < ndims) {
+        h5badArgument(env, "H5Pset_chunk:  dims array < ndims");
+        return -1;
+    }
+    theArray = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR dim, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pset_chunk:  dim array not pinned");
+        return -1;
+    }
+    da = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (da == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR dim, theArray, JNI_ABORT);
+        h5JNIFatalError(env,  "H5Pset_chunk:  dims not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)theArray;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+
+    status = H5Pset_chunk((hid_t)plist, (int)ndims, da);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR dim, theArray, JNI_ABORT);
+    free(da);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
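+/*
+ *  Note on the dim argument above: it is a jbyteArray that packs ndims jlong
+ *  values, which the wrapper widens element-wise to hsize_t before calling
+ *  H5Pset_chunk.  A sketch of the Java side (assuming the HDFNativeData
+ *  helper bundled with these bindings):
+ *
+ *      long[] chunk = {64, 64};
+ *      H5.H5Pset_chunk(dcpl, 2, HDFNativeData.longToByte(chunk));
+ */
+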
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_chunk
+ * Signature: (II[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1chunk
+  (JNIEnv *env, jclass clss, jint plist, jint max_ndims, jlongArray dims)
+{
+    herr_t   status;
+    jlong   *theArray;
+    jboolean isCopy;
+    hsize_t *da;
+    int      i;
+
+    if (dims == NULL) {
+        h5nullArgument(env, "H5Pget_chunk:  dims is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR dims) < max_ndims) {
+        h5badArgument(env, "H5Pget_chunk:  dims array < max_ndims");
+        return -1;
+    }
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_chunk:  input dims not pinned");
+        return -1;
+    }
+    da = (hsize_t *)malloc(max_ndims * sizeof(hsize_t));
+    if (da == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims, theArray, JNI_ABORT);
+        h5JNIFatalError(env, "H5Pget_chunk:  dims not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Pget_chunk((hid_t)plist, (int)max_ndims, da);
+
+    if (status < 0)  {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims, theArray, JNI_ABORT);
+        free (da);
+        h5libraryError(env);
+        return -1;
+    }
+    for (i = 0; i < max_ndims; i++) {
+        theArray[i] = da[i];
+    }
+    free (da);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR dims, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_alignment
+ * Signature: (IJJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1alignment
+  (JNIEnv *env, jclass clss, jint plist, jlong threshold, jlong alignment)
+{
+    long   thr;
+    long   align;
+    herr_t retVal = -1;
+
+    thr = (long)threshold;
+    align = (long)alignment;
+
+    retVal = H5Pset_alignment((hid_t)plist, (hsize_t)thr, (hsize_t)align);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_alignment
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1alignment
+  (JNIEnv *env, jclass clss, jint plist, jlongArray alignment)
+{
+    herr_t   status;
+    jlong   *theArray;
+    jboolean isCopy;
+    hsize_t  t;
+    hsize_t  a;
+
+    if (alignment == NULL) {
+        h5nullArgument(env, "H5Pget_alignment:  input alignment is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR alignment) < 2) {
+        h5badArgument(env, "H5Pget_alignment:  allingment input array < 2");
+        return -1;
+    }
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR alignment, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_alignment:  input array not pinned");
+        return -1;
+    }
+
+    status = H5Pget_alignment((hid_t)plist, &t, &a);
+    if (status < 0)  {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR alignment, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = t;
+    theArray[1] = a;
+    ENVPTR->ReleaseLongArrayElements(ENVPAR alignment, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_external
+ * Signature: (ILjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1external
+  (JNIEnv *env, jclass clss, jint plist, jstring name, jlong offset, jlong size)
+{
+    herr_t   status;
+    char    *file;
+    jboolean isCopy;
+    long     off;
+    long     sz;
+
+    off = (long)offset;
+    sz = (long)size;
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pset_external:  name is NULL");
+        return -1;
+    }
+    file = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (file == NULL) {
+        h5JNIFatalError(env, "H5Pset_external:  name not pinned");
+        return -1;
+    }
+
+    status = H5Pset_external((hid_t)plist, file, (off_t)off, (hsize_t)sz);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, file);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_external_count
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1external_1count
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    int retVal = -1;
+
+    retVal = H5Pget_external_count((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_external
+ * Signature: (IIJ[Ljava/lang/String;[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1external
+  (JNIEnv *env, jclass clss, jint plist, jint idx, jlong name_size,
+  jobjectArray name, jlongArray size)
+{
+    herr_t   status;
+    jlong   *theArray;
+    jboolean isCopy;
+    char    *file;
+    jstring  str;
+    off_t    o;
+    hsize_t  s;
+
+    if (name_size < 0) {
+        h5badArgument(env, "H5Pget_external:  name_size < 0");
+        return -1;
+    }
+    else if (name_size == 0) {
+        file = NULL;
+    }
+    else {
+        file = (char *)malloc(sizeof(char)*(size_t)name_size);
+        if (file == NULL) {
+            h5outOfMemory(env, "H5Pget_external:  name_size malloc failed");
+            return -1;
+        }
+    }
+
+    if (size != NULL) {
+        if (ENVPTR->GetArrayLength(ENVPAR size) < 2) {
+            free(file);
+            h5badArgument(env, "H5Pget_external:  size input array < 2");
+            return -1;
+        }
+        theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+        if (theArray == NULL) {
+            free(file);
+            h5JNIFatalError( env, "H5Pget_external:  size array not pinned");
+            return -1;
+        }
+    }
+
+    status = H5Pget_external((hid_t) plist, (int)idx, (size_t)name_size,
+            file, (off_t *)&o, (hsize_t *)&s);
+    if (status < 0) {
+        if (size != NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+        }
+        free(file);
+        h5libraryError(env);
+        return -1;
+    }
+
+    if (size != NULL) {
+        theArray[0] = o;
+        theArray[1] = s;
+        ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+    }
+
+    if (file != NULL) {
+        /*  NewStringUTF may throw OutOfMemoryError */
+        str = ENVPTR->NewStringUTF(ENVPAR file);
+        if (str == NULL) {
+            free(file);
+            h5JNIFatalError(env, "H5Pget_external:  return array not created");
+            return -1;
+        }
+        /*  SetObjectArrayElement may raise exceptions */
+        ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+        free(file);
+    }
+
+    return (jint)status;
+}
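+
+/*
+ * The wrapper above packs its results into the caller's arrays: name[0]
+ * receives the external file name, size[0] the byte offset and size[1] the
+ * segment size.  A minimal, hypothetical Java-side call:
+ *
+ *     String[] extName = new String[1];
+ *     long[] offSize = new long[2];
+ *     H5.H5Pget_external(plist, 0, 256, extName, offSize);
+ */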
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fill_value
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fill_1value
+  (JNIEnv *env, jclass clss, jint plist_id, jint type_id, jbyteArray value)
+{
+    jint     status = -1;
+    jbyte   *byteP = NULL;    /* NULL is a legal fill value for H5Pset_fill_value */
+    jboolean isCopy;
+
+    if (value != NULL) {
+        byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+        if (byteP == NULL) {
+            h5JNIFatalError(env, "H5Pget_fill_value:  value array not pinned");
+            return -1;
+        }
+    }
+
+    status = H5Pset_fill_value((hid_t)plist_id, (hid_t)type_id, byteP);
+    if (status < 0) {
+        if (value != NULL) {
+            ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+        }
+        h5libraryError(env);
+        return -1;
+    }
+
+    if (value != NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+    }
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fill_value
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fill_1value
+  (JNIEnv *env, jclass clss, jint plist_id, jint type_id, jbyteArray value)
+{
+    jint     status;
+    jbyte   *byteP;
+    jboolean isCopy;
+
+    if (value == NULL) {
+        h5badArgument(env, "H5Pget_fill_value:  value is NULL");
+        return -1;
+    }
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+    if (byteP == NULL) {
+        h5JNIFatalError(env, "H5Pget_fill_value:  value array not pinned");
+        return -1;
+    }
+
+    status = H5Pget_fill_value((hid_t)plist_id, (hid_t)type_id, byteP);
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, 0);
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_filter
+ * Signature: (IIIJ[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1filter
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jint flags,
+  jlong cd_nelmts, jintArray cd_values)
+{
+    herr_t status;
+    jint *theArray;
+    jboolean isCopy;
+
+    if (cd_values == NULL)
+        status = H5Pset_filter((hid_t)plist, (H5Z_filter_t)filter,
+                (unsigned int)flags, (size_t)cd_nelmts, NULL);
+    else {
+        theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+        if (theArray == NULL) {
+            h5JNIFatalError(env, "H5Pset_filter:  input array  not pinned");
+            return -1;
+        }
+        status = H5Pset_filter((hid_t)plist, (H5Z_filter_t)filter,
+                (unsigned int)flags, (size_t)cd_nelmts, (const unsigned int *)theArray);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, theArray, JNI_ABORT);
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+
+}
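+
+/*
+ * cd_values is input-only here, so the pinned array is always released with
+ * JNI_ABORT: nothing was modified, and discarding a possible copy avoids a
+ * needless write-back into the Java heap.
+ */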
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_nfilters
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1nfilters
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    int retVal = -1;
+
+    retVal = H5Pget_nfilters((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_filter
+ * Signature: (II[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter
+  (JNIEnv *env, jclass clss, jint plist, jint filter_number, jintArray flags,
+  jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name)
+{
+    herr_t   status;
+    jint    *flagsArray;
+    jlong   *cd_nelmtsArray;
+    jint    *cd_valuesArray;
+    jboolean isCopy;
+    jstring  str;
+    char    *filter;
+
+    if (namelen <= 0) {
+        h5badArgument(env, "H5Pget_filter:  namelen <= 0");
+        return -1;
+    }
+    if (flags == NULL) {
+        h5badArgument(env, "H5Pget_filter:  flags is NULL");
+        return -1;
+    }
+    if (cd_nelmts == NULL) {
+        h5badArgument(env, "H5Pget_filter:  cd_nelmts is NULL");
+        return -1;
+    }
+    if (cd_values == NULL) {
+        h5badArgument(env, "H5Pget_filter:  cd_values is NULL");
+        return -1;
+    }
+    filter = (char *)malloc(sizeof(char)*(size_t)namelen);
+    if (filter == NULL) {
+        h5outOfMemory(env, "H5Pget_filter:  namelent malloc failed");
+        return -1;
+    }
+    flagsArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR flags, &isCopy);
+    if (flagsArray == NULL) {
+        free(filter);
+        h5JNIFatalError(env, "H5Pget_filter:  flags array not pinned");
+        return -1;
+    }
+    cd_nelmtsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+    if (cd_nelmtsArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(filter);
+        h5JNIFatalError(env, "H5Pget_filter:  nelmts array not pinned");
+        return -1;
+    }
+    cd_valuesArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+    if (cd_valuesArray == NULL)  {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(filter);
+        h5JNIFatalError(env, "H5Pget_filter:  elmts array not pinned");
+        return -1;
+    }
+
+    {
+        /* direct cast (size_t *)variable fails on 32-bit environment */
+        long long cd_nelmts_temp = *(cd_nelmtsArray);
+        size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+        unsigned int filter_config;
+        status = H5Pget_filter2((hid_t)plist, (int)filter_number,
+            (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+            (size_t)namelen, filter, &filter_config);
+
+        *cd_nelmtsArray = cd_nelmts_t;
+    }
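+    /* The round trip above is needed because jlong is always 64 bits while
+       size_t may be 32 bits; writing through a (size_t *) cast of the jlong
+       slot would touch the wrong bytes on a 32-bit platform. */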
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(filter);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+    /*  NewStringUTF may throw OutOfMemoryError */
+    str = ENVPTR->NewStringUTF(ENVPAR filter);
+    if (str == NULL) {
+        free(filter);
+        h5JNIFatalError(env, "H5Pget_filter:  return string not pinned");
+        return -1;
+    }
+    /*  SetObjectArrayElement may throw exceptions */
+    ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+    free(filter);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_driver
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1driver
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    hid_t retVal =  -1;
+
+    retVal = H5Pget_driver((hid_t) plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_cache
+ * Signature: (IIJJD)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1cache
+  (JNIEnv *env, jclass clss, jint plist, jint mdc_nelmts, jlong rdcc_nelmts,
+  jlong rdcc_nbytes, jdouble rdcc_w0)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_cache((hid_t)plist, (int)mdc_nelmts, (size_t)rdcc_nelmts,
+                (size_t)rdcc_nbytes, (double) rdcc_w0);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_cache
+ * Signature: (I[I[J[J[D)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1cache
+  (JNIEnv *env, jclass clss, jint plist, jintArray mdc_nelmts,
+  jlongArray rdcc_nelmts, jlongArray rdcc_nbytes, jdoubleArray rdcc_w0)
+{
+    herr_t   status;
+    jint     mode;
+    jdouble *w0Array;
+    jlong   *rdcc_nelmtsArray;
+    jlong   *nbytesArray;
+    jboolean isCopy;
+
+    if (rdcc_w0 == NULL) {
+        w0Array = (jdouble *)NULL;
+    }
+    else {
+        w0Array = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR rdcc_w0, &isCopy);
+        if (w0Array == NULL) {
+            h5JNIFatalError(env, "H5Pget_cache:  w0_array array not pinned");
+            return -1;
+        }
+    }
+
+    if (rdcc_nelmts == NULL) {
+        rdcc_nelmtsArray = (jlong *) NULL;
+    }
+    else {
+        rdcc_nelmtsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nelmts, &isCopy);
+        if (rdcc_nelmtsArray == NULL) {
+            /* exception -- out of memory */
+            if (w0Array != NULL) {
+                ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+            }
+            h5JNIFatalError(env, "H5Pget_cache:  rdcc_nelmts array not pinned");
+            return -1;
+        }
+    }
+
+    if (rdcc_nbytes == NULL) {
+        nbytesArray = (jlong *) NULL;
+    }
+    else {
+        nbytesArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nbytes, &isCopy);
+        if (nbytesArray == NULL) {
+            if (w0Array != NULL) {
+                ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+            }
+            if (rdcc_nelmtsArray != NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nelmts, rdcc_nelmtsArray, JNI_ABORT);
+            }
+            h5JNIFatalError(env, "H5Pget_cache:  nbytesArray array not pinned");
+            return -1;
+        }
+    }
+    {
+        /* direct cast (size_t *)variable fails on 32-bit environment;
+           the output arrays may also legitimately be NULL here */
+        long long rdcc_nelmts_temp = (rdcc_nelmtsArray != NULL) ? *rdcc_nelmtsArray : 0;
+        size_t rdcc_nelmts_t = (size_t)rdcc_nelmts_temp;
+        long long nbytes_temp = (nbytesArray != NULL) ? *nbytesArray : 0;
+        size_t nbytes_t = (size_t)nbytes_temp;
+
+        status = H5Pget_cache((hid_t)plist, (int *)NULL, &rdcc_nelmts_t,
+                &nbytes_t, (double *)w0Array);
+
+        if (rdcc_nelmtsArray != NULL) {
+            *rdcc_nelmtsArray = rdcc_nelmts_t;
+        }
+        if (nbytesArray != NULL) {
+            *nbytesArray = nbytes_t;
+        }
+    }
+
+
+    if (status < 0) {
+        mode = JNI_ABORT;
+    }
+    else {
+        mode = 0; /* commit and free */
+    }
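+    /* Choosing the mode once keeps the three optional output arrays
+       consistent: 0 commits the (possibly copied) elements back to the
+       Java arrays, JNI_ABORT discards them. */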
+
+    if (rdcc_nelmtsArray != NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nelmts, rdcc_nelmtsArray, mode);
+    }
+
+    if (nbytesArray != NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nbytes, nbytesArray, mode);
+    }
+
+    if (w0Array != NULL) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, mode);
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_buffer
+ * Signature: (IJ[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1buffer
+  (JNIEnv *env, jclass clss, jint plist, jlong size, jbyteArray tconv, jbyteArray bkg)
+{
+    h5unimplemented(env, "H5Pset_buffer:  not implemented");
+    return -1;
+#ifdef notdef
+
+/* DON'T IMPLEMENT THIS!!! */
+    jint     status = -1;
+    jbyte   *tconvP;
+    jbyte   *bkgP;
+    jboolean isCopy;
+
+    if (tconv == NULL)
+        tconvP = (jbyte *)NULL;
+    else {
+        tconvP = ENVPTR->GetByteArrayElements(ENVPAR tconv, &isCopy);
+        if (tconvP == NULL) {
+            h5JNIFatalError(env, "H5Pset_buffer:  tconv not pinned");
+            return -1;
+        }
+    }
+    if (bkg == NULL)
+        bkgP = (jbyte *)NULL;
+    else {
+        bkgP = ENVPTR->GetByteArrayElements(ENVPAR bkg, &isCopy);
+        if (bkgP == NULL) {
+            h5JNIFatalError(env, "H5Pset_buffer:  bkg not pinned");
+            return -1;
+        }
+    }
+
+    status = H5Pset_buffer((hid_t)plist, (size_t)size, tconvP, bkgP);
+    if (status < 0) {
+        if (tconv != NULL)
+            ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, JNI_ABORT);
+        if (bkg != NULL)
+            ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    if (tconv != NULL)
+        ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, 0);
+    if (bkg != NULL)
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, 0);
+
+    return status;
+#endif
+}
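+
+/*
+ * A likely reason the buffer calls are stubbed out: H5Pset_buffer stores the
+ * raw tconv/bkg pointers for use during later H5Dread/H5Dwrite calls, while a
+ * pinned JNI array is only guaranteed valid until it is released, so the
+ * library could be left holding dangling pointers.
+ */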
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_buffer
+ * Signature: (I[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1buffer
+  (JNIEnv *env, jclass clss, jint plist, jbyteArray tconv, jbyteArray bkg)
+{
+    h5unimplemented(env, "H5Pset_buffer:  not implemented");
+    return -1;
+#ifdef notdef
+
+/* DON'T IMPLEMENT THIS!!! */
+    jlong     status = -1;
+    jbyte   *tconvP;
+    jbyte   *bkgP;
+    jboolean isCopy;
+
+    if (tconv == NULL) {
+        h5nullArgument(env, "H5Pget_buffer:  tconv input array is NULL");
+        return -1;
+    }
+    tconvP = ENVPTR->GetByteArrayElements(ENVPAR tconv, &isCopy);
+    if (tconvP == NULL) {
+        h5JNIFatalError(env, "H5Pget_buffer:  tconv not pinned");
+        return -1;
+    }
+    if (bkg == NULL) {
+        h5nullArgument(env, "H5Pget_buffer:  bkg array is NULL");
+        return -1;
+    }
+    bkgP = ENVPTR->GetByteArrayElements(ENVPAR bkg, &isCopy);
+    if (bkgP == NULL) {
+        h5JNIFatalError(env, "H5Pget_buffer:  bkg not pinned");
+        return -1;
+    }
+
+    status = H5Pget_buffer((hid_t)plist, tconvP, bkgP);
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, JNI_ABORT);
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, 0);
+
+    return status;
+#endif
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_buffer_size
+ * Signature: (IJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1buffer_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    herr_t    status = -1;
+
+    status = H5Pset_buffer((hid_t)plist, (size_t)size, NULL, NULL);
+    if (status < 0) {
+        h5libraryError(env);
+        return;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_buffer_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1buffer_1size
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    size_t     size;
+
+    size = H5Pget_buffer((hid_t)plist, NULL, NULL);
+    if (size == 0) {
+        /* size_t is unsigned, so the error return of H5Pget_buffer is 0,
+           never a negative value */
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jlong)size;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_preserve
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1preserve
+  (JNIEnv *env, jclass clss, jint plist, jboolean status)
+{
+    hbool_t st;
+    herr_t  retVal = -1;
+
+    if (status == JNI_TRUE) {
+        st = TRUE;
+    }
+    else if (status == JNI_FALSE) {
+        st = FALSE;
+    }
+    else {
+        /* exception -- bad argument */
+        h5badArgument(env, "H5Pset_preserve:  status not TRUE or FALSE");
+        return -1;
+    }
+
+    retVal = H5Pset_preserve((hid_t)plist, (hbool_t)st);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_preserve
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1preserve
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retValue = -1;
+
+    retValue = H5Pget_preserve((hid_t)plist);
+    if (retValue < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_deflate
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1deflate
+  (JNIEnv *env, jclass clss, jint plist, jint level)
+{
+    herr_t retValue = -1;
+
+    retValue = H5Pset_deflate((hid_t)plist, (int)level);
+    if (retValue < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_gc_references
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1gc_1references
+  (JNIEnv *env, jclass clss, jint fapl_id, jboolean gc_ref)
+{
+    herr_t   retVal = -1;
+    unsigned gc_ref_val;
+
+    if (gc_ref == JNI_TRUE) {
+        gc_ref_val = 1;
+    }
+    else {
+        gc_ref_val = 0;
+    }
+
+    retVal = H5Pset_gc_references((hid_t)fapl_id, gc_ref_val);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_gc_references
+ * Signature: (I[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1gc_1references
+  (JNIEnv *env, jclass clss, jint fapl_id, jbooleanArray gc_ref)
+{
+    herr_t    status;
+    jboolean *theArray;
+    jboolean  isCopy;
+    unsigned  gc_ref_val = 0;
+
+    if (gc_ref == NULL) {
+        h5nullArgument(env, "H5Pget_gc_references:  gc_ref input array is NULL");
+        return -1;
+    }
+    theArray = (jboolean *)ENVPTR->GetBooleanArrayElements(ENVPAR gc_ref, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_gc_references:  gc_ref not pinned");
+        return -1;
+    }
+
+    status = H5Pget_gc_references((hid_t)fapl_id, (unsigned *)&gc_ref_val);
+    if (status < 0) {
+        ENVPTR->ReleaseBooleanArrayElements(ENVPAR gc_ref, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    if (gc_ref_val == 1) {
+        theArray[0] = JNI_TRUE;
+    }
+    else {
+        theArray[0] = JNI_FALSE;
+    }
+    ENVPTR->ReleaseBooleanArrayElements(ENVPAR gc_ref, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_gcreferences
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1gcreferences
+  (JNIEnv *env, jclass clss, jint fapl_id)
+{
+    herr_t    status;
+    unsigned  gc_ref_val = 0;
+
+    status = H5Pget_gc_references((hid_t)fapl_id, (unsigned *)&gc_ref_val);
+    if (status < 0) {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+
+    if (gc_ref_val == 1) {
+        return JNI_TRUE;
+    }
+    return JNI_FALSE;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_btree_ratios
+ * Signature: (IDDD)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1btree_1ratios
+  (JNIEnv *env, jclass clss, jint plist_id, jdouble left, jdouble middle, jdouble right)
+{
+    herr_t status;
+
+    status = H5Pset_btree_ratios((hid_t)plist_id, (double)left,(double)middle, (double)right);
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_btree_ratios
+ * Signature: (I[D[D[D)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1btree_1ratios
+  (JNIEnv *env, jclass clss, jint plist_id, jdoubleArray left, jdoubleArray middle, jdoubleArray right)
+{
+    herr_t   status;
+    jdouble *leftP;
+    jdouble *middleP;
+    jdouble *rightP;
+    jboolean isCopy;
+
+    if (left == NULL) {
+        h5nullArgument(env, "H5Pget_btree_ratios:  left input array is NULL");
+        return -1;
+    }
+    if (middle == NULL) {
+        h5nullArgument(env, "H5Pget_btree_ratios:  middle input array is NULL");
+        return -1;
+    }
+    if (right == NULL) {
+        h5nullArgument(env, "H5Pget_btree_ratios:  right input array is NULL");
+        return -1;
+    }
+
+    leftP = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR left, &isCopy);
+    if (leftP == NULL) {
+        h5JNIFatalError(env, "H5Pget_btree_ratios:  left not pinned");
+        return -1;
+    }
+    middleP = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR middle, &isCopy);
+    if (middleP == NULL) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, JNI_ABORT);
+        h5JNIFatalError(env, "H5Pget_btree_ratios:  middle not pinned");
+        return -1;
+    }
+    rightP = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR right, &isCopy);
+    if (rightP == NULL) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, JNI_ABORT);
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR middle, middleP, JNI_ABORT);
+        h5JNIFatalError(env, "H5Pget_btree_ratios:  middle not pinned");
+        return -1;
+    }
+
+    status = H5Pget_btree_ratios((hid_t)plist_id, (double *)leftP,
+             (double *)middleP, (double *)rightP);
+    if (status < 0) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, JNI_ABORT);
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR middle, middleP, JNI_ABORT);
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR right, rightP, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, 0);
+    ENVPTR->ReleaseDoubleArrayElements(ENVPAR middle, middleP, 0);
+    ENVPTR->ReleaseDoubleArrayElements(ENVPAR right, rightP, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_small_data_block_size
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1small_1data_1block_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    long   sz;
+    herr_t retVal = -1;
+
+    sz = (long)size;
+
+    retVal = H5Pset_small_data_block_size((hid_t)plist, (hsize_t)sz);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_small_data_block_size
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size
+  (JNIEnv *env, jclass clss, jint plist, jlongArray size)
+{
+    herr_t   status;
+    jlong   *theArray;
+    jboolean isCopy;
+    hsize_t  s;
+
+    if (size == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Pget_small_user_block_size:  size is NULL");
+        return -1;
+    }
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_userblock:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_small_data_block_size((hid_t)plist, &s);
+    if (status < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = s;
+    ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_small_data_block_size_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size_1long
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t   status;
+    hsize_t  s;
+
+    status = H5Pget_small_data_block_size((hid_t)plist, &s);
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jlong)s;
+}
+
+
+/***************************************************************
+ *                   New APIs for HDF5 1.6                     *
+ ***************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_alloc_time
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1alloc_1time
+  (JNIEnv *env, jclass clss, jint plist, jint alloc_time)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_alloc_time((hid_t)plist, (H5D_alloc_time_t)alloc_time);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_alloc_time
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1alloc_1time
+  (JNIEnv *env, jclass clss, jint plist, jintArray alloc_time)
+{
+    herr_t           retVal = -1;
+    jint            *theArray;
+    jboolean         isCopy;
+    H5D_alloc_time_t time;
+
+    if (alloc_time == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Pget_alloc_time:  alloc_time is NULL");
+        return -1;
+    }
+    theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR alloc_time, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_alloc_time:  alloc_time not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget_alloc_time((hid_t)plist, &time );
+    if (retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR alloc_time, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = time;
+    ENVPTR->ReleaseIntArrayElements(ENVPAR alloc_time, theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fill_time
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fill_1time
+  (JNIEnv *env, jclass clss, jint plist, jint fill_time)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_fill_time((hid_t)plist, (H5D_fill_time_t)fill_time);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fill_time
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fill_1time
+  (JNIEnv *env, jclass clss, jint plist, jintArray fill_time)
+{
+    herr_t          retVal = -1;
+    jint           *theArray;
+    jboolean        isCopy;
+    H5D_fill_time_t time;
+
+    if (fill_time == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Pget_fill_time:  fill_time is NULL");
+        return -1;
+    }
+    theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR fill_time, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_fill_time:  fill_time not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_fill_time((hid_t)plist, &time);
+    if (retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR fill_time, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = time;
+    ENVPTR->ReleaseIntArrayElements(ENVPAR fill_time, theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pfill_value_defined
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pfill_1value_1defined
+  (JNIEnv *env, jclass clss, jint plist, jintArray status)
+{
+    herr_t retVal = -1;
+    jint *theArray;
+    jboolean isCopy;
+    H5D_fill_value_t value;
+
+
+    if (status == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Pfill_value_defined:  status is NULL");
+        return -1;
+    }
+    theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR status, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pfill_value_defined:  status not pinned");
+        return -1;
+    }
+
+    retVal = H5Pfill_value_defined((hid_t)plist, &value );
+    if (retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR status, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = value;
+    ENVPTR->ReleaseIntArrayElements(ENVPAR status, theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fletcher32
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fletcher32
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_fletcher32((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_edc_check
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1edc_1check
+  (JNIEnv *env, jclass clss, jint plist, jint check)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_edc_check((hid_t)plist, (H5Z_EDC_t)check);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_edc_check
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1edc_1check
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    H5Z_EDC_t retVal = (H5Z_EDC_t)-1;
+
+    retVal = H5Pget_edc_check((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_shuffle
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shuffle
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_shuffle((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_szip
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1szip
+  (JNIEnv *env, jclass clss, jint plist, jint options_mask, jint pixels_per_block)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_szip((hid_t)plist, (unsigned int)options_mask, (unsigned int)pixels_per_block);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_hyper_vector_size
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1hyper_1vector_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong vector_size)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_hyper_vector_size((hid_t)plist, (size_t)vector_size);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_hyper_vector_size
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1hyper_1vector_1size
+  (JNIEnv *env, jclass clss, jint plist, jlongArray vector_size)
+{
+    herr_t   retVal = -1;
+    jlong   *theArray;
+    size_t   size;
+    jboolean isCopy;
+
+    if (vector_size == NULL) {
+        /* exception ? */
+        h5nullArgument(env, "H5Pget_hyper_vector_size:  vector_size is NULL");
+        return -1;
+    }
+
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR vector_size, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_hyper_vector_size:  vector_size not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget_hyper_vector_size((hid_t)plist, &size);
+    if (retVal < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR vector_size, theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    theArray[0] = size;
+    ENVPTR->ReleaseLongArrayElements(ENVPAR vector_size, theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pall_filters_avail
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pall_1filters_1avail
+  (JNIEnv *env, jclass clss, jint dcpl_id)
+{
+    htri_t bval;
+
+    bval = H5Pall_filters_avail((hid_t)dcpl_id);
+    if (bval > 0) {
+        return JNI_TRUE;
+    }
+    else if (bval == 0) {
+        return JNI_FALSE;
+    }
+    else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
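+
+/*
+ * htri_t follows the usual HDF5 three-state convention: positive means true,
+ * zero means false, negative means failure; the wrapper above maps these to
+ * JNI_TRUE, JNI_FALSE, and a raised exception respectively.
+ */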
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pmodify_filter
+ * Signature: (IIIJ[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pmodify_1filter
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jint flags,
+  jlong cd_nelmts, jintArray cd_values)
+{
+    herr_t   status;
+    jint    *cd_valuesP;
+    jboolean isCopy;
+
+    if (cd_values == NULL) {
+        h5nullArgument(env, "H5Pmodify_filter:  cd_values is NULL");
+        return -1;
+    }
+
+    cd_valuesP = ENVPTR->GetIntArrayElements(ENVPAR cd_values,&isCopy);
+    if (cd_valuesP == NULL) {
+        h5JNIFatalError(env, "H5Pmodify_filter:  cd_values not pinned");
+        return -1;
+    }
+
+    status = H5Pmodify_filter((hid_t)plist, (H5Z_filter_t)filter,(const unsigned int)flags,
+              (size_t)cd_nelmts, (unsigned int *)cd_valuesP);
+
+    ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesP, JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_filter_by_id
+ * Signature: (II[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jintArray flags,
+  jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name)
+{
+    herr_t       status;
+    jint        *cd_valuesArray;
+    jint        *flagsArray;
+    jlong       *cd_nelmtsArray;
+    jboolean     isCopy;
+    jstring      str;
+    char        *aName;
+    long         bs;
+
+    bs = (long)namelen;
+    if (bs <= 0) {
+        h5badArgument(env, "H5Pget_filter_by_id:  namelen <= 0");
+        return -1;
+    }
+    if (flags == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  flags is NULL");
+        return -1;
+    }
+    if (cd_nelmts == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  cd_nelms is NULL");
+        return -1;
+    }
+    if (cd_values == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  cd_values is NULL");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  name is NULL");
+        return -1;
+    }
+
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory(env, "H5Pget_filter_by_id:  malloc failed");
+        return -1;
+    }
+
+    flagsArray = ENVPTR->GetIntArrayElements(ENVPAR flags, &isCopy);
+    if (flagsArray == NULL) {
+        free(aName);
+        h5JNIFatalError(env, "H5Pget_filter_by_id:  flags not pinned");
+        return -1;
+    }
+
+    cd_nelmtsArray = ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+    if (cd_nelmtsArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(aName);
+        h5JNIFatalError(env, "H5Pget_filter_by_id:  cd_nelms not pinned");
+        return -1;
+    }
+
+    cd_valuesArray = ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+    rank = ENVPTR->GetArrayLength(ENVPAR cd_values);
+    if (cd_valuesArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+        free(aName);
+        h5JNIFatalError(env, "H5Pget_filter_by_id:  cd_values array not converted to unsigned int.");
+        return -1;
+    }
+
+    {
+        /* direct cast (size_t *)variable fails on 32-bit environment */
+        long long cd_nelmts_temp = *(cd_nelmtsArray);
+        size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+        unsigned int filter_config;
+
+        status = H5Pget_filter_by_id2( (hid_t)plist, (H5Z_filter_t)filter,
+                (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+                (size_t)namelen, (char *)aName, &filter_config);
+
+        *cd_nelmtsArray = cd_nelmts_t;
+    }
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+        free(aName);
+        h5libraryError(env);
+        return -1;
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+
+    free(aName);
+
+    return (jint)status;
+}
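+
+/*
+ * Note that filter_config is obtained from H5Pget_filter_by_id2 only into a
+ * local and is not surfaced to the Java caller; the H5Pget_filter2 wrapper
+ * further below does return it.
+ */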
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fclose_degree
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fclose_1degree
+  (JNIEnv *env, jclass clss, jint plist, jint fc_degree)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_fclose_degree((hid_t)plist, (H5F_close_degree_t)fc_degree);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fclose_degree
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fclose_1degree
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    H5F_close_degree_t degree;
+    herr_t             retVal = -1;
+
+    retVal = H5Pget_fclose_degree((hid_t)plist, &degree);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)degree;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *                    File access properties                          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_family
+ * Signature: (IJI)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1family
+  (JNIEnv *env, jclass clss, jint plist, jlong memb_size, jint memb_plist)
+{
+    long   ms;
+    herr_t retVal = -1;
+
+    ms = (long)memb_size;
+    retVal = H5Pset_fapl_family((hid_t)plist, (hsize_t)ms, (hid_t)memb_plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fapl_family
+ * Signature: (I[J[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1family
+  (JNIEnv *env, jclass clss, jint tid, jlongArray memb_size, jintArray memb_plist)
+{
+    herr_t   status;
+    jlong   *sizeArray;
+    jint    *plistArray;
+    jboolean isCopy;
+    hsize_t *sa;
+    int      i;
+    int      rank;
+
+    if (memb_size == NULL) {
+        h5nullArgument(env, "H5Pget_fapl_family:  memb_size is NULL");
+        return -1;
+    }
+    if (memb_plist == NULL) {
+        h5nullArgument(env, "H5Pget_fapl_family:  memb_plist is NULL");
+        return -1;
+    }
+
+    sizeArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_size, &isCopy);
+    if (sizeArray == NULL) {
+        h5JNIFatalError(env,  "H5Pget_fapl_family:  sizeArray not pinned");
+        return -1;
+    }
+    rank  = ENVPTR->GetArrayLength(ENVPAR  memb_size);
+    sa = (hsize_t *)malloc( rank * sizeof(hsize_t));
+    if (sa == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, JNI_ABORT);
+        h5JNIFatalError(env,  "H5Pget_fapl_family:  memb_size not converted to hsize_t");
+        return -1;
+    }
+    plistArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR memb_plist, &isCopy);
+    if (plistArray == NULL) {
+        free(sa);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, JNI_ABORT);
+        h5JNIFatalError(env,  "H5Pget_fapl_family:  plistArray not pinned");
+        return -1;
+    }
+
+    status = H5Pget_fapl_family ((hid_t)tid, sa, (hid_t *)plistArray);
+
+    if (status < 0) {
+        free(sa);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR memb_plist, plistArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    for (i = 0; i < rank; i++) {
+        sizeArray[i] = sa[i];
+    }
+    free(sa);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR memb_plist, plistArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_core
+ * Signature: (IJZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1core
+  (JNIEnv *env, jclass clss, jint fapl_id, jlong increment, jboolean backing_store)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_fapl_core((hid_t)fapl_id, (size_t)increment, (hbool_t)backing_store);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fapl_core
+ * Signature: (I[J[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1core
+  (JNIEnv *env, jclass clss, jint fapl_id, jlongArray increment, jbooleanArray backing_store)
+{
+    herr_t    status;
+    jlong    *incArray;
+    jboolean *backArray;
+    jboolean  isCopy;
+
+    if (increment == NULL) {
+        h5nullArgument(env, "H5Pget_fapl_core:  increment is NULL");
+        return -1;
+    }
+    if (backing_store == NULL) {
+        h5nullArgument(env, "H5Pget_fapl_core:  backing_store is NULL");
+        return -1;
+    }
+
+    incArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR increment, &isCopy);
+    if (incArray == NULL) {
+        h5JNIFatalError(env,  "H5Pget_fapl_core:  incArray not pinned");
+        return -1;
+    }
+
+    backArray = (jboolean *)ENVPTR->GetBooleanArrayElements(ENVPAR backing_store, &isCopy);
+    if (backArray == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR increment, incArray, JNI_ABORT);
+        h5JNIFatalError(env, "H5Pget_fapl_core:  backArray not pinned");
+        return -1;
+    }
+
+    {
+        /* direct cast (size_t *)variable fails on 32-bit environment */
+        long long inc_temp = *(incArray);
+        size_t inc_t = (size_t)inc_temp;
+
+        status = H5Pget_fapl_core((hid_t)fapl_id, &inc_t, (hbool_t *)backArray);
+
+        *incArray = inc_t;
+    }
+
+    if (status < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR increment, incArray, JNI_ABORT);
+        ENVPTR->ReleaseBooleanArrayElements(ENVPAR backing_store, backArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseLongArrayElements(ENVPAR increment, incArray, 0);
+    ENVPTR->ReleaseBooleanArrayElements(ENVPAR backing_store, backArray, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_family_offset
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1family_1offset
+  (JNIEnv *env, jclass clss, jint fapl_id, jlong offset)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_family_offset ((hid_t)fapl_id, (hsize_t)offset);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_family_offset
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1family_1offset
+  (JNIEnv *env, jclass clss, jint fapl_id)
+{
+    hsize_t offset = -1;
+    herr_t  retVal = -1;
+
+    retVal = H5Pget_family_offset ((hid_t)fapl_id, &offset);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jlong)offset;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_log
+ * Signature: (ILjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1log
+  (JNIEnv *env, jclass clss, jint fapl_id, jstring logfile, jlong flags, jlong buf_size)
+{
+    herr_t   retVal = -1;
+    char    *pLogfile;
+    jboolean isCopy;
+
+    if (logfile == NULL) {
+        h5nullArgument(env, "H5Pset_fapl_log:  logfile is NULL");
+        return;
+    }
+
+    pLogfile = (char *)ENVPTR->GetStringUTFChars(ENVPAR logfile, &isCopy);
+
+    if (pLogfile == NULL) {
+        h5JNIFatalError(env, "H5Pset_fapl_log:  logfile not pinned");
+        return;
+    }
+
+#if (H5_VERS_RELEASE > 6) /* H5_VERSION_GE(1,8,7) */
+    retVal = H5Pset_fapl_log( (hid_t)fapl_id, (const char *)pLogfile, (unsigned long long)flags, (size_t)buf_size );
+#else
+    retVal = H5Pset_fapl_log( (hid_t)fapl_id, (const char *)pLogfile, (unsigned int)flags, (size_t)buf_size );
+#endif
+    ENVPTR->ReleaseStringUTFChars(ENVPAR logfile, pLogfile);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return;
+}
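+
+/*
+ * The conditional above tracks an upstream signature change: from HDF5 1.8.7
+ * on, the flags argument of H5Pset_fapl_log is an unsigned long long rather
+ * than an unsigned int.
+ */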
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Premove_filter
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Premove_1filter
+  (JNIEnv *env, jclass clss, jint obj_id, jint filter)
+{
+    herr_t status;
+
+    status = H5Premove_filter ((hid_t)obj_id, (H5Z_filter_t)filter);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return status;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+    Modified by Peter Cao on July 26, 2006:
+        Some of the Generic Property APIs have callback function
+        pointers, which Java does not support. Only the Generic
+        Property APIs without function pointers are implemented
+ *                                                                    *
+ **********************************************************************/
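+
+/*
+ * A minimal sketch (names illustrative only) of how the wrappers below line
+ * up with the generic property C API they delegate to:
+ *
+ *     hid_t cls  = H5Pcreate_class(H5P_ROOT, "example",
+ *                                  NULL, NULL, NULL, NULL, NULL, NULL);
+ *     int   dflt = 0;
+ *     H5Pregister2(cls, "answer", sizeof(int), &dflt,
+ *                  NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+ *     hid_t plid = H5Pcreate(cls);
+ *     int   val  = 42;
+ *     H5Pset(plid, "answer", &val);
+ *     if (H5Pexist(plid, "answer") > 0)
+ *         H5Pget(plid, "answer", &val);
+ *     H5Pclose(plid);
+ *     H5Pclose_class(cls);
+ */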
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset
+  (JNIEnv *env, jclass clss, jint plid, jstring name, jint val)
+{
+    char    *cstr;
+    jboolean isCopy;
+    hid_t    retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pset: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError(env, "H5Pset: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pset((hid_t)plid, cstr, &val);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pexist
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pexist
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char    *cstr;
+    jboolean isCopy;
+    hid_t    retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pexist: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError(env, "H5Pexist: name not pinned");
+        return -1;
+    }
+
+    retVal = H5Pexist((hid_t)plid, cstr);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_size
+ * Signature: (ILjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1size
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char    *cstr;
+    jboolean isCopy;
+    hid_t    retVal = -1;
+    size_t   size;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pget_size: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Pget_size: name not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_size((hid_t)plid, cstr, &size);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong) size;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_nprops
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1nprops
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    hid_t  retVal = -1;
+    size_t nprops;
+
+    retVal = H5Pget_nprops((hid_t)plid, &nprops);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong)nprops;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_class_name
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1class_1name
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    char   *c_str;
+    jstring j_str;
+
+    c_str = H5Pget_class_name((hid_t)plid);
+    if (c_str == NULL) {
+        h5libraryError(env);
+        return NULL;
+    }
+
+    j_str = ENVPTR->NewStringUTF(ENVPAR c_str);
+    H5free_memory(c_str);
+
+    if (j_str == NULL) {
+        h5JNIFatalError(env,"H5Pget_class_name: return string failed");
+    }
+
+    return j_str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_class_parent
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1class_1parent
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Pget_class_parent((hid_t)plid);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pisa_class
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pisa_1class
+  (JNIEnv *env, jclass clss, jint plid, jint pcls)
+{
+    htri_t retVal = -1;
+
+    retVal = H5Pisa_class((hid_t)plid, (hid_t)pcls);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char    *cstr;
+    jboolean isCopy;
+    jint     val;
+    jint     retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pget: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError(env, "H5Pget: name not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget((hid_t)plid, cstr, &val);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)val;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pequal
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pequal
+  (JNIEnv *env, jclass clss, jint plid1, jint plid2)
+{
+    htri_t retVal = -1;
+
+    retVal = H5Pequal((hid_t)plid1, (hid_t)plid2);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcopy_prop
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pcopy_1prop
+  (JNIEnv *env, jclass clss, jint dst_plid, jint src_plid, jstring name)
+{
+    char    *cstr;
+    jboolean isCopy;
+    jint     retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pcopy_prop: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError(env, "H5Pcopy_prop: name not pinned");
+        return -1;
+    }
+
+    retVal = H5Pcopy_prop((hid_t)dst_plid, (hid_t)src_plid, cstr);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Premove
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Premove
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char    *cstr;
+    jboolean isCopy;
+    jint     retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument(env, "H5Premove: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError(env, "H5Premove: name not pinned");
+        return -1;
+    }
+
+    retVal = H5Premove((hid_t)plid, cstr);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Punregister
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Punregister
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char    *cstr;
+    jboolean isCopy;
+    jint     retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument(env, "H5Punregister: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError(env, "H5Punregister: name not pinned");
+        return -1;
+    }
+
+    retVal = H5Punregister((hid_t)plid, cstr);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pclose_class
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pclose_1class
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Pclose_class((hid_t)plid);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_filter2
+ * Signature: (II[I[J[IJ[Ljava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter2
+  (JNIEnv *env, jclass clss, jint plist, jint filter_number, jintArray flags,
+  jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name,
+  jintArray filter_config)
+{
+    herr_t   status;
+    jint    *flagsArray;
+    jlong   *cd_nelmtsArray;
+    jint    *cd_valuesArray = NULL;
+    jint    *filter_configArray;
+    jboolean isCopy;
+    char    *filter;
+    jstring  str;
+
+    if (namelen <= 0) {
+        h5badArgument(env, "H5Pget_filter:  namelen <= 0");
+        return -1;
+    }
+    if (flags == NULL) {
+        h5badArgument(env, "H5Pget_filter:  flags is NULL");
+        return -1;
+    }
+    if (cd_nelmts == NULL) {
+        h5badArgument(env, "H5Pget_filter:  cd_nelmts is NULL");
+        return -1;
+    }
+    if (filter_config == NULL) {
+        h5badArgument(env, "H5Pget_filter:  filter_config is NULL");
+        return -1;
+    }
+
+    filter = (char *)malloc(sizeof(char)*(size_t)namelen);
+    if (filter == NULL) {
+        h5outOfMemory(env, "H5Pget_filter:  namelen malloc failed");
+        return -1;
+    }
+    flagsArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR flags, &isCopy);
+    if (flagsArray == NULL) {
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  flags array not pinned");
+        return -1;
+    }
+
+    cd_nelmtsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+    if (cd_nelmtsArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  nelmts array not pinned");
+        return -1;
+    }
+    filter_configArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR filter_config, &isCopy);
+    if (filter_configArray == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  filter_config array not pinned");
+        return -1;
+    }
+
+    if (*cd_nelmtsArray == 0 && cd_values == NULL) {
+        /* direct cast (size_t *)variable fails on 32-bit environment */
+        long long cd_nelmts_temp = 0;
+        size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+
+        status = H5Pget_filter2((hid_t)plist, (int)filter_number,
+            (unsigned int *)flagsArray, &cd_nelmts_t, NULL,
+            (size_t)namelen, filter, (unsigned int *)filter_configArray);
+
+        *cd_nelmtsArray = cd_nelmts_t;
+    }
+    else {
+        if (cd_values == NULL) {
+            ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+            ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+            free(filter);
+            h5badArgument(env, "H5Pget_filter:  cd_values is NULL");
+            return -1;
+        }
+        cd_valuesArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+        if (cd_valuesArray == NULL)  {
+            ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+            ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+            free(filter);
+            h5JNIFatalError(env,  "H5Pget_filter:  cd_values array not pinned");
+            return -1;
+        }
+
+        {
+            /* direct cast (size_t *)variable fails on 32-bit environment */
+            long long cd_nelmts_temp = *(cd_nelmtsArray);
+            size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+
+            status = H5Pget_filter2((hid_t)plist, (int)filter_number,
+                (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+                (size_t)namelen, filter, (unsigned int *)filter_configArray);
+
+            *cd_nelmtsArray = cd_nelmts_t;
+        }
+    }
+
+    if (status < 0) {
+        if (cd_values)
+            ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(filter);
+        h5libraryError(env);
+        return -1;
+    }
+    if (cd_values)
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, 0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+    /*  NewStringUTF may throw OutOfMemoryError */
+    str = ENVPTR->NewStringUTF(ENVPAR filter);
+    if (str == NULL) {
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  return string not pinned");
+        return -1;
+    }
+    /*  SetObjectArrayElement may throw exceptions */
+    ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+
+    free(filter);
+    return (jint)status;
+}
+
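+/*
+ * Usage sketch (hypothetical helper, illustrative only): iterating the
+ * filter pipeline of a dataset creation property list with H5Pget_filter2,
+ * the call wrapped above.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_H5Pget_filter2(hid_t dcpl)
+{
+    int      nfilters = H5Pget_nfilters(dcpl);
+    int      i;
+    unsigned flags, filter_config, cd_values[8];
+    size_t   cd_nelmts;
+    char     name[64];
+
+    for (i = 0; i < nfilters; i++) {
+        cd_nelmts = 8; /* in/out parameter: reset before each call */
+        H5Pget_filter2(dcpl, (unsigned)i, &flags, &cd_nelmts, cd_values,
+                       sizeof(name), name, &filter_config);
+    }
+}
+#endif
+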
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_filter_by_id2
+ * Signature: (II[I[J[IJ[Ljava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id2
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jintArray flags,
+  jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name, jintArray filter_config)
+{
+    herr_t   status;
+    jint    *cd_valuesArray;
+    jint    *flagsArray;
+    jint    *filter_configArray;
+    jlong   *cd_nelmtsArray;
+    jboolean isCopy;
+    long     bs;
+    char    *aName;
+    jstring  str;
+
+    bs = (long)namelen;
+    if (bs <= 0) {
+        h5badArgument(env, "H5Pget_filter_by_id:  namelen <= 0");
+        return -1;
+    }
+    if (flags == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  flags is NULL");
+        return -1;
+    }
+    if (cd_nelmts == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  cd_nelmts is NULL");
+        return -1;
+    }
+    if (cd_values == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  cd_values is NULL");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument(env, "H5Pget_filter_by_id:  name is NULL");
+        return -1;
+    }
+    if (filter_config == NULL) {
+        h5badArgument(env, "H5Pget_filter_by_id:  filter_config is NULL");
+        return -1;
+    }
+
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory(env, "H5Pget_filter_by_id:  malloc failed");
+        return -1;
+    }
+    flagsArray = ENVPTR->GetIntArrayElements(ENVPAR flags,&isCopy);
+    if (flagsArray == NULL) {
+        free(aName);
+        h5JNIFatalError(env, "H5Pget_filter_by_id:  flags not pinned");
+        return -1;
+    }
+    cd_nelmtsArray = ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+    if (cd_nelmtsArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        free(aName);
+        h5JNIFatalError(env, "H5Pget_filter_by_id:  cd_nelmts not pinned");
+        return -1;
+    }
+    cd_valuesArray = ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+    if (cd_valuesArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        free(aName);
+        h5JNIFatalError(env, "H5Pget_filter_by_id:  cd_values not pinned");
+        return -1;
+    }
+    filter_configArray = ENVPTR->GetIntArrayElements(ENVPAR filter_config, &isCopy);
+    if (filter_configArray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+        free(aName);
+        h5JNIFatalError(env,  "H5Pget_filter_by_id:  filter_config not pinned");
+        return -1;
+    }
+
+    {
+        /* direct cast (size_t *)variable fails on 32-bit environment */
+        long long cd_nelmts_temp = *(cd_nelmtsArray);
+        size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+
+        status = H5Pget_filter_by_id2((hid_t)plist, (H5Z_filter_t)filter,
+            (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+            (size_t)namelen, (char *)aName, (unsigned int *)filter_configArray);
+
+        *cd_nelmtsArray = cd_nelmts_t;
+    }
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+        free(aName);
+        h5libraryError(env);
+        return -1;
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, 0);
+
+    free(aName);
+
+    return (jint)status;
+}
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.8.0                               *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_nlinks
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1nlinks
+  (JNIEnv *env, jclass clss, jint lapl_id)
+{
+    herr_t retVal = -1;
+    size_t nlinks = 0;
+
+    retVal = H5Pget_nlinks((hid_t)lapl_id, &nlinks);
+    if (retVal < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jlong)nlinks;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_nlinks
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1nlinks
+  (JNIEnv *env, jclass clss, jint lapl_id, jlong nlinks)
+{
+     herr_t retVal = -1;
+
+     if (nlinks <= 0) {
+         h5badArgument(env, "H5Pset_nlinks:  nlinks <= 0");
+         return -1;
+     }
+
+     retVal = H5Pset_nlinks((hid_t)lapl_id, (size_t)nlinks);
+     if(retVal < 0) {
+         h5libraryError(env);
+     }
+
+     return (jint)retVal;
+}
+
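+/*
+ * Usage sketch (hypothetical helper, illustrative only): the nlinks pair
+ * above bounds how many soft/external links may be traversed while
+ * resolving a path through a link access property list.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_nlinks(void)
+{
+    hid_t  lapl = H5Pcreate(H5P_LINK_ACCESS);
+    size_t nlinks = 0;
+
+    H5Pset_nlinks(lapl, 32);      /* allow up to 32 link traversals */
+    H5Pget_nlinks(lapl, &nlinks); /* nlinks is now 32 */
+    H5Pclose(lapl);
+}
+#endif
+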
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_libver_bounds
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1libver_1bounds
+  (JNIEnv *env, jclass clss, jint fapl_id, jintArray libver)
+{
+    herr_t        retVal = -1;
+    H5F_libver_t *theArray = NULL;
+    jboolean      isCopy;
+
+    if (libver == NULL) {
+        h5nullArgument(env, "H5Pget_libver_bounds:  libversion bounds is NULL");
+        return -1;
+    }
+    theArray = (H5F_libver_t *)ENVPTR->GetIntArrayElements(ENVPAR libver, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_libver_bounds:  input not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_libver_bounds((hid_t)fapl_id, &(theArray[0]), &(theArray[1]));
+    if(retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR libver, (jint *)theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR libver, (jint *)theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_libver_bounds
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1libver_1bounds
+  (JNIEnv *env, jclass clss, jint fapl_id, jint low, jint high)
+{
+    herr_t retVal = -1;
+
+    if ((H5F_libver_t)high != H5F_LIBVER_LATEST) {
+        h5badArgument(env, "H5Pset_libver_bounds:  invalid high library version bound");
+        return -1;
+    }
+
+    if(((H5F_libver_t)low != H5F_LIBVER_EARLIEST) && ((H5F_libver_t)low != H5F_LIBVER_LATEST)) {
+        h5badArgument(env, "H5Pset_libver_bounds:  invalid low library version bound");
+        return -1;
+    }
+
+    retVal = H5Pset_libver_bounds((hid_t)fapl_id, (H5F_libver_t)low, (H5F_libver_t)high);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
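+/*
+ * Usage sketch (hypothetical helper, illustrative only): with the wrapper
+ * above, a file access property list can force the newest object formats.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_libver_bounds(void)
+{
+    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
+
+    /* write objects using the latest available format versions */
+    H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+    H5Pclose(fapl);
+}
+#endif
+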
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_link_creation_order
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1link_1creation_1order
+  (JNIEnv *env, jclass clss, jint gcpl_id)
+{
+    herr_t   retVal = -1;
+    unsigned crt_order_flags;
+
+    retVal = H5Pget_link_creation_order((hid_t)gcpl_id, &crt_order_flags);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)crt_order_flags;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_link_creation_order
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1link_1creation_1order
+  (JNIEnv *env, jclass clss, jint gcpl_id, jint crt_order_flags)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_link_creation_order((hid_t)gcpl_id, crt_order_flags);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
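+/*
+ * Usage sketch (hypothetical helper, illustrative only): tracking, and
+ * optionally indexing, link creation order on a group creation property
+ * list, as exposed by the get/set pair above.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_link_creation_order(void)
+{
+    hid_t gcpl = H5Pcreate(H5P_GROUP_CREATE);
+
+    H5Pset_link_creation_order(gcpl,
+        H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED);
+    H5Pclose(gcpl);
+}
+#endif
+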
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_attr_creation_order
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1attr_1creation_1order
+  (JNIEnv *env, jclass clss, jint ocpl_id)
+{
+    herr_t   retVal = -1;
+    unsigned crt_order_flags;
+
+    retVal = H5Pget_attr_creation_order((hid_t)ocpl_id, &crt_order_flags);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)crt_order_flags;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_attr_creation_order
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1attr_1creation_1order
+  (JNIEnv *env, jclass clss, jint ocpl_id, jint crt_order_flags)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_attr_creation_order((hid_t)ocpl_id, crt_order_flags);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_copy_object
+ * Signature: (II)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1copy_1object
+  (JNIEnv *env, jclass clss, jint ocp_plist_id, jint copy_options)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_copy_object((hid_t)ocp_plist_id, (unsigned)copy_options);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return;
+}
+
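+/*
+ * Usage sketch (hypothetical helper and object names, illustrative only):
+ * an object copy property list configured by the wrapper above drives
+ * H5Ocopy; the file identifiers are assumed to exist.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_copy_object(hid_t src_file, hid_t dst_file)
+{
+    hid_t ocpypl = H5Pcreate(H5P_OBJECT_COPY);
+
+    /* copy only the immediate object, not the hierarchy below it */
+    H5Pset_copy_object(ocpypl, H5O_COPY_SHALLOW_HIERARCHY_FLAG);
+    H5Ocopy(src_file, "/data", dst_file, "/data", ocpypl, H5P_DEFAULT);
+    H5Pclose(ocpypl);
+}
+#endif
+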
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_copy_object
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1copy_1object
+  (JNIEnv *env, jclass clss, jint ocp_plist_id)
+{
+    herr_t   retVal = -1;
+    unsigned copy_options;
+
+    retVal = H5Pget_copy_object((hid_t)ocp_plist_id, &copy_options);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)copy_options;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_create_intermediate_group
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1create_1intermediate_1group
+  (JNIEnv *env, jclass clss, jint lcpl_id, jboolean crt_intermed_group)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_create_intermediate_group((hid_t)lcpl_id, (unsigned)crt_intermed_group);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_create_intermediate_group
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1create_1intermediate_1group
+  (JNIEnv *env, jclass clss, jint lcpl_id)
+{
+    herr_t retVal = -1;
+    unsigned crt_intermed_group;
+
+    retVal = H5Pget_create_intermediate_group((hid_t)lcpl_id, &crt_intermed_group);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jboolean)crt_intermed_group;
+}
+
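+/*
+ * Usage sketch (hypothetical helper and path, illustrative only): a link
+ * creation property list with intermediate-group creation enabled lets a
+ * single call create a whole path.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_intermediate_group(hid_t file)
+{
+    hid_t lcpl = H5Pcreate(H5P_LINK_CREATE);
+    hid_t grp;
+
+    H5Pset_create_intermediate_group(lcpl, 1);
+    /* creates /a and /a/b on the way to /a/b/c */
+    grp = H5Gcreate2(file, "/a/b/c", lcpl, H5P_DEFAULT, H5P_DEFAULT);
+    H5Gclose(grp);
+    H5Pclose(lcpl);
+}
+#endif
+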
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_data_transform
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1data_1transform
+  (JNIEnv *env, jclass clss, jint plist_id, jstring expression)
+{
+    herr_t    retVal = -1;
+    char     *express;
+    jboolean  isCopy;
+
+    if (expression == NULL) {
+        h5nullArgument(env, "H5Pset_data_transform:  expression is NULL");
+        return -1;
+    }
+    express = (char *)ENVPTR->GetStringUTFChars(ENVPAR expression, &isCopy);
+    if (express == NULL) {
+        h5JNIFatalError(env, "H5Pset_data_transform: expression is not pinned");
+        return -1;
+    }
+    retVal = H5Pset_data_transform((hid_t)plist_id, (const char*)express);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR expression, express);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_data_transform
+ * Signature: (I[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1data_1transform
+  (JNIEnv *env, jclass clss, jint plist_id, jobjectArray expression, jlong size)
+{
+    ssize_t  buf_size;
+    char    *express;
+    jlong    express_size;
+    jstring  str = NULL;
+
+    if (size <= 0) {
+        h5badArgument(env, "H5Pget_data_transform:  size <= 0");
+        return -1;
+    }
+    express_size = (jlong)H5Pget_data_transform((hid_t)plist_id, (char*)NULL, (size_t)size);
+    if(express_size < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+    buf_size = (ssize_t)express_size + 1;/* add extra space for the null terminator */
+    express = (char*)malloc(sizeof(char)*buf_size);
+    if (express == NULL) {
+        h5outOfMemory(env, "H5Pget_data_transform:  malloc failed ");
+        return -1;
+    }
+
+    express_size = (jlong)H5Pget_data_transform((hid_t)plist_id, (char*)express, (size_t)size);
+    if (express_size < 0) {
+        free(express);
+        h5libraryError(env);
+        return -1;
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR express);
+    if (str == NULL) {
+        /* exception -- fatal JNI error */
+        free(express);
+        h5JNIFatalError(env, "H5Pget_data_transform:  return string not created");
+        return -1;
+    }
+    ENVPTR->SetObjectArrayElement(ENVPAR expression, 0, str);
+    free(express);
+
+    return express_size;
+}
+
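+/*
+ * Usage sketch (hypothetical helper, illustrative only): a transform
+ * expression set on a dataset transfer property list is applied
+ * element-wise during read/write, matching the get/set pair above.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_data_transform(void)
+{
+    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
+    char  expr[64];
+
+    H5Pset_data_transform(dxpl, "(x+1)*2");
+    H5Pget_data_transform(dxpl, expr, sizeof(expr));
+    H5Pclose(dxpl);
+}
+#endif
+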
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_elink_acc_flags
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1elink_1acc_1flags
+  (JNIEnv *env, jclass clss, jint lapl_id)
+{
+    herr_t   status;
+    unsigned flags;
+
+    status = H5Pget_elink_acc_flags((hid_t)lapl_id, &flags);
+    if(status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)flags;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_elink_acc_flags
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1acc_1flags
+  (JNIEnv *env, jclass clss, jint lapl_id, jint flags)
+{
+    herr_t retVal;
+
+    if (((unsigned) flags != H5F_ACC_RDWR) &&
+            ((unsigned) flags != H5F_ACC_RDONLY) &&
+            ((unsigned) flags != H5F_ACC_DEFAULT)) {
+        h5badArgument(env, "H5Pset_elink_acc_flags: invalid flags value");
+        return -1;
+    }
+
+    retVal = H5Pset_elink_acc_flags((hid_t)lapl_id, (unsigned)flags);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_link_phase_change
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1link_1phase_1change
+  (JNIEnv *env, jclass clss, jint gcpl_id, jint max_compact, jint min_dense)
+{
+    herr_t retVal;
+
+    if(max_compact < min_dense) {
+        h5badArgument(env, "H5Pset_link_phase_change: max compact value must be >= min dense value");
+        return -1;
+    }
+    if(max_compact > 65535) {
+        h5badArgument(env, "H5Pset_link_phase_change: max compact value must be < 65536");
+        return -1;
+    }
+    if(min_dense > 65535) {
+        h5badArgument(env, "H5Pset_link_phase_change: min dense value must be < 65536");
+        return -1;
+    }
+
+    retVal = H5Pset_link_phase_change((hid_t)gcpl_id, (unsigned)max_compact, (unsigned)min_dense);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+
+}
+
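+/*
+ * Usage sketch (hypothetical helper, illustrative only): compact link
+ * storage is kept until a group exceeds max_compact links; storage drops
+ * back from dense below min_dense, per the wrapper above.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_link_phase_change(void)
+{
+    hid_t gcpl = H5Pcreate(H5P_GROUP_CREATE);
+
+    /* stay compact up to 16 links, return to compact below 8 */
+    H5Pset_link_phase_change(gcpl, 16, 8);
+    H5Pclose(gcpl);
+}
+#endif
+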
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_link_phase_change
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1link_1phase_1change
+  (JNIEnv *env, jclass clss, jint gcpl_id, jintArray links)
+{
+    herr_t    retVal = -1;
+    unsigned *theArray = NULL;
+    jboolean  isCopy;
+
+    if (links == NULL) {
+        h5nullArgument( env, "H5Pget_link_phase_change:  links is NULL");
+        return -1;
+    }
+    theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR links, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_link_phase_change:  input not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_link_phase_change((hid_t)gcpl_id, &(theArray[0]), &(theArray[1]));
+    if(retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR links, (jint *)theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR links, (jint *)theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_attr_phase_change
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1attr_1phase_1change
+  (JNIEnv *env, jclass clss, jint ocpl_id, jintArray attributes)
+{
+    herr_t    retVal = -1;
+    unsigned *theArray = NULL;
+    jboolean  isCopy;
+
+    if (attributes == NULL) {
+        h5nullArgument(env, "H5Pget_attr_phase_change:  attributes is NULL");
+        return -1;
+    }
+    theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR attributes, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_attr_phase_change:  input not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_attr_phase_change((hid_t)ocpl_id, &(theArray[0]), &(theArray[1]));
+    if(retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR attributes, (jint *)theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR attributes, (jint *)theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_shared_mesg_phase_change
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1phase_1change
+  (JNIEnv *env, jclass clss, jint fcpl_id, jintArray size)
+{
+    herr_t    retVal = -1;
+    unsigned *theArray = NULL;
+    jboolean  isCopy;
+
+    if (size == NULL) {
+        h5nullArgument(env, "H5Pget_shared_mesg_phase_change:  size is NULL");
+        return -1;
+    }
+    theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR size, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_shared_mesg_phase_change:  input not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_shared_mesg_phase_change((hid_t)fcpl_id, &(theArray[0]), &(theArray[1]));
+    if(retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR size, (jint *)theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR size, (jint *)theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_shared_mesg_phase_change
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1phase_1change
+  (JNIEnv *env, jclass clss, jint fcpl_id, jint max_list, jint min_btree)
+{
+    herr_t retVal;
+
+    /* Check that the values are sensible: the min_btree value must be no
+     * greater than the max_list value plus one, and both must be small
+     * enough to fit into the encoded form (range checks below).
+     */
+
+    if(max_list + 1 < min_btree) {
+        h5badArgument(env, "H5Pset_shared_mesg_phase_change: minimum B-tree value is greater than maximum list value");
+        return -1;
+    }
+    if(max_list > H5O_SHMESG_MAX_LIST_SIZE) {
+        h5badArgument(env, "H5Pset_shared_mesg_phase_change: max list value is larger than H5O_SHMESG_MAX_LIST_SIZE");
+        return -1;
+    }
+    if(min_btree > H5O_SHMESG_MAX_LIST_SIZE) {
+        h5badArgument(env, "H5Pset_shared_mesg_phase_change: min btree value is larger than H5O_SHMESG_MAX_LIST_SIZE");
+        return -1;
+    }
+
+    retVal = H5Pset_shared_mesg_phase_change((hid_t)fcpl_id, (unsigned)max_list, (unsigned)min_btree);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_shared_mesg_nindexes
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1nindexes
+  (JNIEnv *env, jclass clss, jint fcpl_id)
+{
+    herr_t   status;
+    unsigned nindexes;
+
+    status = H5Pget_shared_mesg_nindexes((hid_t)fcpl_id, &nindexes);
+    if(status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)nindexes;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_shared_mesg_nindexes
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1nindexes
+  (JNIEnv *env, jclass clss, jint plist_id, jint nindexes)
+{
+    herr_t retVal;
+
+    if (nindexes > H5O_SHMESG_MAX_NINDEXES) {
+        h5badArgument(env, "H5Pset_shared_mesg_nindexes: number of indexes is greater than H5O_SHMESG_MAX_NINDEXES");
+        return -1;
+    }
+
+    retVal = H5Pset_shared_mesg_nindexes((hid_t)plist_id, (unsigned)nindexes);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_shared_mesg_index
+ * Signature: (IIII)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1index
+  (JNIEnv *env, jclass clss, jint fcpl_id, jint index_num, jint mesg_type_flags, jint min_mesg_size)
+{
+    herr_t      retVal;
+    unsigned    nindexes;/* Number of SOHM indexes */
+
+    /* Check arguments */
+    if(mesg_type_flags > H5O_SHMESG_ALL_FLAG) {
+        h5badArgument(env, "H5Pset_shared_mesg_index: unrecognized flags in mesg_type_flags");
+        return -1;
+    }
+    /* Read the current number of indexes */
+    if(H5Pget_shared_mesg_nindexes((hid_t)fcpl_id, &nindexes) < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+    /* Range check */
+    if((unsigned)index_num >= nindexes) {
+        h5badArgument(env, "H5Pset_shared_mesg_index: index_num is too large; no such index");
+        return -1;
+    }
+
+    retVal = H5Pset_shared_mesg_index((hid_t)fcpl_id, (unsigned)index_num, (unsigned) mesg_type_flags, (unsigned) min_mesg_size);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
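+/*
+ * Usage sketch (hypothetical helper, illustrative only): configuring one
+ * shared object header message (SOHM) index on a file creation property
+ * list with the two wrappers above.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_shared_mesg(void)
+{
+    hid_t fcpl = H5Pcreate(H5P_FILE_CREATE);
+
+    H5Pset_shared_mesg_nindexes(fcpl, 1);
+    /* share datatype and dataspace messages of 40 bytes or more */
+    H5Pset_shared_mesg_index(fcpl, 0,
+        H5O_SHMESG_DTYPE_FLAG | H5O_SHMESG_SDSPACE_FLAG, 40);
+    H5Pclose(fcpl);
+}
+#endif
+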
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_shared_mesg_index
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1index
+  (JNIEnv *env, jclass clss, jint fcpl_id, jint index_num, jintArray mesg_info)
+{
+    herr_t    retVal = -1;
+    unsigned  nindexes;/* Number of SOHM indexes */
+    unsigned *theArray = NULL;
+    jboolean  isCopy;
+
+    /* Read the current number of indexes */
+    if(H5Pget_shared_mesg_nindexes((hid_t)fcpl_id, &nindexes) < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+    /* Range check */
+    if((unsigned)index_num >= nindexes) {
+        h5badArgument(env, "H5Pget_shared_mesg_index: index_num is too large; no such index");
+        return -1;
+    }
+    if (mesg_info == NULL) {
+        h5nullArgument(env, "H5Pget_shared_mesg_index:  mesg_info is NULL");
+        return -1;
+    }
+    theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR mesg_info, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_shared_mesg_index:  input not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_shared_mesg_index((hid_t)fcpl_id, (unsigned)index_num, &(theArray[0]), &(theArray[1]));
+    if(retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR mesg_info, (jint *)theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR mesg_info, (jint *)theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_local_heap_size_hint
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1local_1heap_1size_1hint
+  (JNIEnv *env, jclass clss, jint gcpl_id, jlong size_hint)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_local_heap_size_hint((hid_t)gcpl_id, (size_t)size_hint);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_local_heap_size_hint
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1local_1heap_1size_1hint
+  (JNIEnv *env, jclass clss, jint gcpl_id)
+{
+    herr_t status;
+    size_t size_hint;
+
+    status = H5Pget_local_heap_size_hint((hid_t)gcpl_id, &size_hint);
+    if(status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong)size_hint;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_nbit
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1nbit
+  (JNIEnv *env, jclass clss, jint plist_id)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_nbit((hid_t)plist_id);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_scaleoffset
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1scaleoffset
+  (JNIEnv *env, jclass clss, jint plist_id, jint scale_type, jint scale_factor)
+{
+    herr_t retVal = -1;
+
+    /* Check arguments */
+    if(scale_factor < 0) {
+        h5badArgument(env, "H5Pset_scaleoffset: scale factor must be >= 0");
+        return -1;
+    }
+    if(scale_type != H5Z_SO_FLOAT_DSCALE && scale_type != H5Z_SO_FLOAT_ESCALE && scale_type != H5Z_SO_INT){
+        h5badArgument(env, "H5Pset_scaleoffset: invalid scale type");
+        return -1;
+    }
+
+    retVal = H5Pset_scaleoffset((hid_t)plist_id, (H5Z_SO_scale_type_t)scale_type, scale_factor);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
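+/*
+ * Usage sketch (hypothetical helper, illustrative only): the scale-offset
+ * filter set by the wrapper above requires a chunked dataset creation
+ * property list.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_scaleoffset(void)
+{
+    hid_t   dcpl = H5Pcreate(H5P_DATASET_CREATE);
+    hsize_t chunk[1] = {1024};
+
+    H5Pset_chunk(dcpl, 1, chunk);
+    /* integer data; the default factor lets the library pick the minimum bits */
+    H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT);
+    H5Pclose(dcpl);
+}
+#endif
+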
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_est_link_info
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1est_1link_1info
+  (JNIEnv *env, jclass clss, jint gcpl_id, jint est_num_entries, jint est_name_len)
+{
+    herr_t retVal = -1;
+
+    /* Range check values */
+    if(est_num_entries > 65535) {
+        h5badArgument(env, "H5Pset_est_link_info: est. number of entries must be < 65536");
+        return -1;
+    }
+    if(est_name_len > 65535) {
+        h5badArgument(env, "H5Pset_est_link_info: est. name length must be < 65536");
+        return -1;
+    }
+
+    retVal = H5Pset_est_link_info((hid_t)gcpl_id, (unsigned)est_num_entries, (unsigned)est_name_len);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_est_link_info
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1est_1link_1info
+  (JNIEnv *env, jclass clss, jint gcpl_id, jintArray link_info)
+{
+    herr_t    retVal = -1;
+    unsigned *theArray = NULL;
+    jboolean  isCopy;
+
+    if (link_info == NULL) {
+        h5nullArgument(env, "H5Pget_est_link_info:  link_info is NULL");
+        return -1;
+    }
+    theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR link_info,&isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_est_link_info:  input not pinned");
+        return -1;
+    }
+
+    retVal= H5Pget_est_link_info((hid_t)gcpl_id, &(theArray[0]), &(theArray[1]));
+    if(retVal < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR link_info, (jint *)theArray, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR link_info, (jint *)theArray, 0);
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_elink_fapl
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1fapl
+  (JNIEnv *env, jclass clss, jint lapl_id, jint fapl_id)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_elink_fapl((hid_t)lapl_id, (hid_t)fapl_id);
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Pget_elink_fapl
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pget_1elink_1fapl
+  (JNIEnv *env, jclass clss, jint lapl_id)
+{
+    hid_t retVal = -1;
+
+    retVal = H5Pget_elink_fapl((hid_t)lapl_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_elink_prefix
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1prefix
+  (JNIEnv *env, jclass clss, jint lapl_id, jstring prefix)
+{
+    herr_t      retVal = -1;
+    const char *aName;
+    jboolean    isCopy;
+
+    if (prefix == NULL) {
+        h5nullArgument(env, "H5Pset_elink_prefix: prefix is NULL");
+        return -1;
+    }
+    aName = (const char*)ENVPTR->GetStringUTFChars(ENVPAR prefix, &isCopy);
+    if (aName == NULL) {
+        h5JNIFatalError(env, "H5Pset_elink_prefix: prefix not pinned");
+        return -1;
+    }
+
+    retVal = H5Pset_elink_prefix((hid_t)lapl_id, aName);
+    if(retVal < 0) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR prefix, aName);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseStringUTFChars(ENVPAR prefix, aName);
+
+    return (jint)retVal;
+}
+
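+/*
+ * Usage sketch (hypothetical helper and path, illustrative only): a prefix
+ * on a link access property list is prepended when resolving relative
+ * external-link targets, per the set/get pair here.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_elink_prefix(void)
+{
+    hid_t lapl = H5Pcreate(H5P_LINK_ACCESS);
+
+    H5Pset_elink_prefix(lapl, "/mnt/data/ext");
+    H5Pclose(lapl);
+}
+#endif
+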
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_elink_prefix
+ * Signature: (I[Ljava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1elink_1prefix
+  (JNIEnv *env, jclass clss, jint lapl_id, jobjectArray prefix)
+{
+    size_t  size = 0; /* ignored by H5Pget_elink_prefix when the buffer is NULL */
+    char   *pre;
+    jlong   prefix_size;
+    jstring str = NULL;
+
+    if (prefix == NULL) {
+        h5nullArgument(env, "H5Pget_elink_prefix: prefix is NULL");
+        return -1;
+    }
+    prefix_size = (jlong)H5Pget_elink_prefix((hid_t)lapl_id, (char*)NULL, size);
+    if(prefix_size < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+    size = (size_t)prefix_size + 1;/* add extra space for the null terminator */
+    pre = (char*)malloc(sizeof(char)*size);
+    if (pre == NULL) {
+        h5outOfMemory(env, "H5Pget_elink_prefix:  malloc failed ");
+        return -1;
+    }
+    prefix_size = (jlong)H5Pget_elink_prefix((hid_t)lapl_id, (char*)pre, size);
+    if (prefix_size < 0) {
+        free(pre);
+        h5libraryError(env);
+        return -1;
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR pre);
+    if (str == NULL) {
+        /* exception -- fatal JNI error */
+        free(pre);
+        h5JNIFatalError(env, "H5Pget_elink_prefix:  return string not created");
+        return -1;
+    }
+    ENVPTR->SetObjectArrayElement(ENVPAR prefix, 0, str);
+    free(pre);
+
+    return prefix_size;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_direct
+ * Signature: (IJJJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1direct
+ (JNIEnv *env, jclass clss, jint fapl_id, jlong alignment, jlong block_size, jlong cbuf_size)
+{
+    herr_t retVal = -1;
+
+#ifdef H5_HAVE_DIRECT
+    retVal = H5Pset_fapl_direct((hid_t)fapl_id, (size_t)alignment, (size_t)block_size, (size_t)cbuf_size);
+#endif
+    if(retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fapl_direct
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1direct
+ (JNIEnv *env, jclass clss, jint fapl_id, jlongArray info)
+{
+    herr_t   retVal = -1;
+
+#ifdef H5_HAVE_DIRECT
+    size_t   alignment = 0;
+    size_t   block_size = 0;
+    size_t   cbuf_size = 0;
+    jlong   *theArray;
+    jboolean isCopy;
+    if (info == NULL) {
+        h5nullArgument(env, "H5Pget_fapl_direct:  info input array is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR info) < 3) {
+        h5badArgument( env, "H5Pget_fapl_direct:  info input array < 3");
+        return -1;
+    }
+
+    theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR info, &isCopy);
+    if (theArray == NULL) {
+        h5JNIFatalError(env, "H5Pget_fapl_direct:  info not pinned");
+        return -1;
+    }
+
+    retVal = H5Pget_fapl_direct((hid_t)fapl_id, &alignment, &block_size, &cbuf_size);
+    if(retVal <0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR info, theArray, JNI_ABORT);
+        h5libraryError(env);
+    }
+    else {
+        theArray[0] = alignment;
+        theArray[1] = block_size;
+        theArray[2] = cbuf_size;
+        ENVPTR->ReleaseLongArrayElements(ENVPAR info, theArray, 0);
+    }
+#else
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+#endif
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_sec2
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1sec2
+  (JNIEnv *env, jclass clss, jint fapl_id)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_fapl_sec2((hid_t) fapl_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_stdio
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1stdio
+  (JNIEnv *env, jclass clss, jint fapl_id)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_fapl_stdio((hid_t) fapl_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_windows
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1windows
+  (JNIEnv *env, jclass clss, jint fapl_id)
+{
+    herr_t retVal = -1;
+
+#ifdef H5_HAVE_WINDOWS
+    retVal = H5Pset_fapl_windows((hid_t) fapl_id);
+#endif
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fapl_multi
+ * Signature: (I[I[I[Ljava/lang/String;[J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1multi
+  (JNIEnv *env, jclass clss, jint tid, jintArray memb_map, jintArray memb_fapl, jobjectArray memb_name, jlongArray memb_addr)
+{
+    herr_t   status;
+    int      i;
+    jint    *themapArray = NULL;
+    jint    *thefaplArray = NULL;
+    jlong   *theaddrArray = NULL;
+    char   **mName = NULL;
+    jstring  str;
+    jboolean isCopy;
+    int relax = 0;
+    
+    if (memb_map) {
+        themapArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR memb_map, &isCopy);
+        if (themapArray == NULL) {
+            h5JNIFatalError(env, "H5Pget_fapl_multi:  memb_map not pinned");
+            return JNI_FALSE;
+        }
+    }
+
+    if (memb_fapl) {
+        thefaplArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR memb_fapl, &isCopy);
+        if (thefaplArray == NULL) {
+            if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+            h5JNIFatalError(env, "H5Pget_fapl_multi:  memb_fapl not pinned");
+            return JNI_FALSE;
+        }
+    }
+
+    if (memb_addr) {
+        theaddrArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_addr, &isCopy);
+        if (theaddrArray == NULL) {
+            if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+            if (memb_fapl) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+            h5JNIFatalError(env, "H5Pget_fapl_multi:  memb_addr not pinned");
+            return JNI_FALSE;
+        }
+    }
+    
+    if (memb_name) mName = (char **)calloc(H5FD_MEM_NTYPES, sizeof (*mName));
+    
+    status = H5Pget_fapl_multi((hid_t)tid, (H5FD_mem_t *)themapArray, (hid_t *)thefaplArray, mName, (haddr_t *)theaddrArray, (hbool_t *)&relax);
+    if (status < 0) {
+        if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+        if (memb_fapl) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+        if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, JNI_ABORT);
+        if (memb_name) h5str_array_free(mName, H5FD_MEM_NTYPES);
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+
+    if (memb_map) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, 0);
+    }
+
+    if (memb_fapl) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR memb_fapl, thefaplArray, 0);
+    }
+
+    if (memb_addr) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, 0);
+    }
+
+    if (memb_name) {
+        if (mName) {
+            for (i = 0; i < H5FD_MEM_NTYPES; i++) {
+                if (*(mName + i)) {
+                    str = ENVPTR->NewStringUTF(ENVPAR *(mName+i));
+                    ENVPTR->SetObjectArrayElement(ENVPAR memb_name, i, (jobject)str);
+                }
+            } /* for (i = 0; i < H5FD_MEM_NTYPES; i++) */
+        }
+        h5str_array_free(mName, H5FD_MEM_NTYPES);
+    }
+    
+    return (relax!=0);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_multi
+ * Signature: (I[I[I[Ljava/lang/String;[JZ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1multi
+  (JNIEnv *env, jclass clss, jint tid, jintArray memb_map, jintArray memb_fapl, jobjectArray memb_name, jlongArray memb_addr, jboolean relax)
+{
+    herr_t       status;
+    jint        *themapArray = NULL;
+    jint        *thefaplArray = NULL;
+    jlong       *theaddrArray = NULL;
+    jboolean     isCopy;
+    jclass       Sjc;
+    jstring      rstring;
+    jobject      o;
+    jboolean     bb;
+    const char **mName = NULL;
+    char  *member_name[H5FD_MEM_NTYPES];
+    
+    if (memb_map) {
+        themapArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR memb_map, &isCopy);
+        if (themapArray == NULL) {
+            h5JNIFatalError(env, "H5Pset_fapl_multi:  memb_map not pinned");
+            return;
+        }
+    }
+
+    if (memb_fapl) {
+        thefaplArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR memb_fapl, &isCopy);
+        if (thefaplArray == NULL) {
+            if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+            h5JNIFatalError(env, "H5Pset_fapl_multi:  memb_fapl not pinned");
+            return;
+        }
+    }
+
+    if (memb_addr) {
+        theaddrArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_addr, &isCopy);
+        if (theaddrArray == NULL) {
+            if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+            if (memb_fapl) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+            h5JNIFatalError(env, "H5Pset_fapl_multi:  memb_addr not pinned");
+            return;
+        }
+    }
+
+    memset(member_name, 0, H5FD_MEM_NTYPES * sizeof(char*));
+    if (memb_name) {
+        int i;
+        for (i = 0; i < H5FD_MEM_NTYPES; i++) {
+            jstring obj = (jstring) ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray) memb_name, i);
+            if (obj != 0) {
+                const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+                if (utf8) {
+                    member_name[i] = (char*)malloc(strlen(utf8) + 1);
+                    if (member_name[i]) {
+                        strcpy(member_name[i], utf8);
+                    }
+                    ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+                }
+                ENVPTR->DeleteLocalRef(ENVPAR obj);
+            }
+        }
+        mName = (const char **)member_name;
+    }
+    
+    status = H5Pset_fapl_multi((hid_t)tid, (const H5FD_mem_t *)themapArray, (const hid_t *)thefaplArray, mName, (const haddr_t *)theaddrArray, (hbool_t)relax);
+
+    if (status < 0) {
+        if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+        if (memb_fapl) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+        if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, JNI_ABORT);
+        h5libraryError(env);
+        return;
+    }
+    if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, 0);
+    if (memb_fapl) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_fapl, thefaplArray, 0);
+    if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, 0);
+    if (memb_name) {
+        if (mName != NULL) {
+            int i;
+            Sjc = ENVPTR->FindClass(ENVPAR  "java/lang/String");
+            if (Sjc == NULL) {
+                return;
+            }
+            for (i = 0; i < H5FD_MEM_NTYPES; i++) {
+                rstring = ENVPTR->NewStringUTF(ENVPAR member_name[i]);
+                o = ENVPTR->GetObjectArrayElement(ENVPAR memb_name, i);
+                if (o == NULL) {
+                    return;
+                }
+                bb = ENVPTR->IsInstanceOf(ENVPAR o, Sjc);
+                if (bb == JNI_FALSE) {
+                    return;
+                }
+                ENVPTR->SetObjectArrayElement(ENVPAR memb_name, i, (jobject)rstring);
+                ENVPTR->DeleteLocalRef(ENVPAR o);
+                free(member_name[i]);
+            }
+        }
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_split
+ * Signature: (ILjava/lang/String;ILjava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1split
+  (JNIEnv *env, jclass clss, jint fapl_id, jstring metaext, jint meta_pl_id, jstring rawext, jint raw_pl_id)
+{
+    herr_t retVal = -1;
+    const char    *mstr;
+    const char    *rstr;
+    jboolean isCopy;
+
+    if (metaext == NULL) {
+        h5nullArgument( env, "H5Pset_fapl_split: metaext is NULL");
+        return;
+    }
+    mstr = (const char *)ENVPTR->GetStringUTFChars(ENVPAR metaext, &isCopy);
+    if (mstr == NULL) {
+        h5JNIFatalError( env, "H5Pset_fapl_split: metaext not pinned");
+        return;
+    }
+
+    if (rawext == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR metaext, mstr);
+        h5nullArgument( env, "H5Pset_fapl_split: rawext is NULL");
+        return;
+    }
+    rstr = (const char *)ENVPTR->GetStringUTFChars(ENVPAR rawext, &isCopy);
+    if (rstr == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR metaext, mstr);
+        h5JNIFatalError( env, "H5Pset_fapl_split: rawext not pinned");
+        return;
+    }
+
+    retVal = H5Pset_fapl_split((hid_t)fapl_id, mstr, (hid_t)meta_pl_id, rstr, (hid_t)raw_pl_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR metaext, mstr);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR rawext, rstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return;
+}
+
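+/*
+ * Usage sketch (hypothetical helper and file names, illustrative only):
+ * the split driver set above stores metadata and raw data in two companion
+ * files distinguished by the given extensions.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_fapl_split(void)
+{
+    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
+    hid_t file;
+
+    H5Pset_fapl_split(fapl, "-m.h5", H5P_DEFAULT, "-r.h5", H5P_DEFAULT);
+    /* creates test-m.h5 (metadata) and test-r.h5 (raw data) */
+    file = H5Fcreate("test", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+    H5Fclose(file);
+    H5Pclose(fapl);
+}
+#endif
+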
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_meta_block_size
+ * Signature: (IJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1meta_1block_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    herr_t status = -1;
+
+    /* pass the jlong straight through; casting to long truncates on LLP64 */
+    status = H5Pset_meta_block_size((hid_t)plist, (hsize_t)size);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_meta_block_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1meta_1block_1size
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t   status;
+    hsize_t  s;
+
+    status = H5Pget_meta_block_size((hid_t)plist, &s);
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jlong)s;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_sieve_buf_size
+ * Signature: (IJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1sieve_1buf_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    size_t   sz;
+    herr_t status = -1;
+
+    sz = (size_t)size;
+
+    status = H5Pset_sieve_buf_size((hid_t)plist, (size_t)sz);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_sieve_buf_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1sieve_1buf_1size
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t   status;
+    size_t  s;
+
+    status = H5Pget_sieve_buf_size((hid_t)plist, &s);
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jlong)s;
+}
+
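+/*
+ * Usage sketch (hypothetical helper, illustrative only): the two sizes
+ * wrapped above tune file-level I/O aggregation on a file access property
+ * list.
+ */
+#if 0 /* example only, excluded from the build */
+static void example_block_sizes(void)
+{
+    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
+
+    H5Pset_meta_block_size(fapl, 4096);        /* aggregate small metadata  */
+    H5Pset_sieve_buf_size(fapl, 256 * 1024);   /* buffer raw-data sieving   */
+    H5Pclose(fapl);
+}
+#endif
+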
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_elink_file_cache_size
+ * Signature: (II)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1file_1cache_1size
+  (JNIEnv *env, jclass clss, jint plist, jint size)
+{
+#if (H5_VERS_RELEASE > 6) /* H5_VERSION_GE(1,8,7) */
+    unsigned   sz;
+    herr_t status = -1;
+
+    sz = (unsigned)size;
+
+    status = H5Pset_elink_file_cache_size((hid_t)plist, (unsigned)sz);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+#else
+    h5unimplemented(env, "H5Pset_elink_file_cache_size: only available > 1.8.6");
+#endif
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_elink_file_cache_size
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1elink_1file_1cache_1size
+  (JNIEnv *env, jclass clss, jint plist)
+{
+#if (H5_VERS_RELEASE > 6) /* H5_VERSION_GE(1,8,7) */
+    herr_t   status;
+    unsigned  s;
+
+    status = H5Pget_elink_file_cache_size((hid_t)plist, &s);
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jint)s;
+#else
+    h5unimplemented(env, "H5Pget_elink_file_cache_size: only available > 1.8.6");
+    return -1;
+#endif
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_mdc_config
+ * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5AC_cache_config_t;
+ */
+JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1mdc_1config
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t     status = -1;
+    H5AC_cache_config_t cacheinfo;
+    jclass     cls;
+    jmethodID  constructor;
+    jvalue     args[30];
+    jstring    j_str = NULL;
+    jobject    ret_info_t = NULL;
+
+    memset(&cacheinfo, 0, sizeof(H5AC_cache_config_t));
+    cacheinfo.version = H5AC__CURR_CACHE_CONFIG_VERSION;
+    status = H5Pget_mdc_config((hid_t)plist, (H5AC_cache_config_t*)&cacheinfo);
+
+    if (status < 0) {
+       h5libraryError(env);
+       return NULL;
+    }
+
+    /* look up the Java H5AC_cache_config_t class */
+    cls = ENVPTR->FindClass(ENVPAR "ncsa/hdf/hdf5lib/structs/H5AC_cache_config_t");
+    /* get a reference to its constructor; the JNI name is <init> */
+    constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", "(IZZZLjava/lang/String;ZZJDJJJIDDZJIDDIDDZJIZDII)V");
+    args[0].i = cacheinfo.version;
+    args[1].z = cacheinfo.rpt_fcn_enabled;
+    args[2].z = cacheinfo.open_trace_file;
+    args[3].z = cacheinfo.close_trace_file;
+    if (cacheinfo.trace_file_name != NULL) {
+        j_str = ENVPTR->NewStringUTF(ENVPAR cacheinfo.trace_file_name);
+    }
+    args[4].l = j_str;
+    args[5].z = cacheinfo.evictions_enabled;
+    args[6].z = cacheinfo.set_initial_size;
+    args[7].j = (jlong)cacheinfo.initial_size;
+    args[8].d = cacheinfo.min_clean_fraction;
+    args[9].j = (jlong)cacheinfo.max_size;
+    args[10].j = (jlong)cacheinfo.min_size;
+    args[11].j = cacheinfo.epoch_length;
+    args[12].i = cacheinfo.incr_mode;
+    args[13].d = cacheinfo.lower_hr_threshold;
+    args[14].d = cacheinfo.increment;
+    args[15].z = cacheinfo.apply_max_increment;
+    args[16].j = (jlong)cacheinfo.max_increment;
+    args[17].i = cacheinfo.flash_incr_mode;
+    args[18].d = cacheinfo.flash_multiple;
+    args[19].d = cacheinfo.flash_threshold;
+    args[20].i = cacheinfo.decr_mode;
+    args[21].d = cacheinfo.upper_hr_threshold;
+    args[22].d = cacheinfo.decrement;
+    args[23].z = cacheinfo.apply_max_decrement;
+    args[24].j = (jlong)cacheinfo.max_decrement;
+    args[25].i = cacheinfo.epochs_before_eviction;
+    args[26].z = cacheinfo.apply_empty_reserve;
+    args[27].d = cacheinfo.empty_reserve;
+    args[28].i = cacheinfo.dirty_bytes_threshold;
+#if (H5_VERS_RELEASE >= 6)
+    args[29].i = cacheinfo.metadata_write_strategy;
+#else
+    args[29].i = 0; /* metadata_write_strategy does not exist before 1.8.6 */
+#endif
+    ret_info_t = ENVPTR->NewObjectA(ENVPAR cls, constructor, args);
+    return ret_info_t;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_mdc_config
+ * Signature: (ILncsa/hdf/hdf5lib/structs/H5AC_cache_config_t;)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1mdc_1config
+  (JNIEnv *env, jclass clss, jint plist, jobject cache_config)
+{
+    herr_t      status;
+    jclass      cls;
+    jfieldID    fid;
+    jstring     j_str;
+    const char *str;
+    H5AC_cache_config_t cacheinfo;
+    
+    cls = ENVPTR->GetObjectClass(ENVPAR cache_config);
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "version", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  version");
+        return;
+    }
+    cacheinfo.version = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading version failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "rpt_fcn_enabled", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  rpt_fcn_enabled");
+        return;
+    }
+    cacheinfo.rpt_fcn_enabled = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading rpt_fcn_enabled failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "open_trace_file", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  open_trace_file");
+        return;
+    }
+    cacheinfo.open_trace_file = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading open_trace_file failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "close_trace_file", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  close_trace_file");
+        return;
+    }
+    cacheinfo.close_trace_file = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading close_trace_file failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "trace_file_name", "Ljava/lang/String;");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  trace_file_name");
+        return;
+    }
+    j_str = (jstring)ENVPTR->GetObjectField(ENVPAR cache_config, fid);
+    str = ENVPTR->GetStringUTFChars(ENVPAR j_str, NULL);
+    if (str == NULL) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: out of memory trace_file_name");
+        return;
+    }
+    strncpy(cacheinfo.trace_file_name, str, H5AC__MAX_TRACE_FILE_NAME_LEN);
+    cacheinfo.trace_file_name[H5AC__MAX_TRACE_FILE_NAME_LEN] = '\0'; /* strncpy may not terminate */
+    ENVPTR->ReleaseStringUTFChars(ENVPAR j_str, str);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading trace_file_name failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "evictions_enabled", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  evictions_enabled");
+        return;
+    }
+    cacheinfo.evictions_enabled = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading evictions_enabled failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "set_initial_size", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  set_initial_size");
+        return;
+    }
+    cacheinfo.set_initial_size = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading set_initial_size failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "initial_size", "J");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  initial_size");
+        return;
+    }
+    cacheinfo.initial_size = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading initial_size failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "min_clean_fraction", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  min_clean_fraction");
+        return;
+    }
+    cacheinfo.min_clean_fraction = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading min_clean_fraction failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "max_size", "J");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  max_size");
+        return;
+    }
+    cacheinfo.max_size = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading max_size failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "min_size", "J");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  min_size");
+        return;
+    }
+    cacheinfo.min_size = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading min_size failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "epoch_length", "J");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  epoch_length");
+        return;
+    }
+    cacheinfo.epoch_length = (long int)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading epoch_length failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "incr_mode", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  incr_mode");
+        return;
+    }
+    cacheinfo.incr_mode = (enum H5C_cache_incr_mode)ENVPTR->GetIntField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading incr_mode failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "lower_hr_threshold", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  lower_hr_threshold");
+        return;
+    }
+    cacheinfo.lower_hr_threshold = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading lower_hr_threshold failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "increment", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  increment");
+        return;
+    }
+    cacheinfo.increment = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading increment failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "apply_max_increment", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  apply_max_increment");
+        return;
+    }
+    cacheinfo.apply_max_increment = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading apply_max_increment failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "max_increment", "J");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  max_increment");
+        return;
+    }
+    cacheinfo.max_increment = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading max_increment failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "flash_incr_mode", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  flash_incr_mode");
+        return;
+    }
+    cacheinfo.flash_incr_mode = (enum H5C_cache_flash_incr_mode)ENVPTR->GetIntField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading flash_incr_mode failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "flash_multiple", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  flash_multiple");
+        return;
+    }
+    cacheinfo.flash_multiple = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading flash_multiple failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "flash_threshold", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  flash_threshold");
+        return;
+    }
+    cacheinfo.flash_threshold = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading flash_threshold failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "decr_mode", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  decr_mode");
+        return;
+    }
+    cacheinfo.decr_mode = (enum H5C_cache_decr_mode)ENVPTR->GetIntField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading decr_mode failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "upper_hr_threshold", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  upper_hr_threshold");
+        return;
+    }
+    cacheinfo.upper_hr_threshold = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading upper_hr_threshold failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "decrement", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  decrement");
+        return;
+    }
+    cacheinfo.decrement = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading decrement failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "apply_max_decrement", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  apply_max_decrement");
+        return;
+    }
+    cacheinfo.apply_max_decrement = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading apply_max_decrement failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "max_decrement", "J");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  max_decrement");
+        return;
+    }
+    cacheinfo.max_decrement = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading max_decrement failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "epochs_before_eviction", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  epochs_before_eviction");
+        return;
+    }
+    cacheinfo.epochs_before_eviction = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading epochs_before_eviction failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "apply_empty_reserve", "Z");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  apply_empty_reserve");
+        return;
+    }
+    cacheinfo.apply_empty_reserve = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading apply_empty_reserve failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "empty_reserve", "D");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  empty_reserve");
+        return;
+    }
+    cacheinfo.empty_reserve = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading empty_reserve failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "dirty_bytes_threshold", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  dirty_bytes_threshold");
+        return;
+    }
+    cacheinfo.dirty_bytes_threshold = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading dirty_bytes_threshold failed");
+        return;
+    }
+    
+    fid = ENVPTR->GetFieldID(ENVPAR cls, "metadata_write_strategy", "I");
+    if(fid == 0) {
+        h5badArgument(env, "H5Pset_mdc_config:  metadata_write_strategy");
+        return;
+    }
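+    /* As in the getter, metadata_write_strategy is only present in the
+     * native struct from HDF5 1.8.6 on. */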
+#if (H5_VERS_RELEASE >= 6)
+    cacheinfo.metadata_write_strategy = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+#endif
+    if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+        h5JNIFatalError(env, "H5Pset_mdc_config: loading metadata_write_strategy failed");
+        return;
+    }
+    
+    status = H5Pset_mdc_config((hid_t)plist, (H5AC_cache_config_t*)&cacheinfo);
+
+    if (status < 0) {
+       h5libraryError(env);
+       return;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_chunk_cache
+ * Signature: (IJJD)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1chunk_1cache
+(JNIEnv *env, jclass clss, jint dapl, jlong rdcc_nslots,
+        jlong rdcc_nbytes, jdouble rdcc_w0)
+{
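+    /* Per the HDF5 documentation: rdcc_nslots is the number of chunk slots
+     * in the raw data chunk cache, rdcc_nbytes its total size in bytes, and
+     * rdcc_w0 the preemption weight for fully read/written chunks. */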
+    herr_t retVal = -1;
+
+    retVal = H5Pset_chunk_cache((hid_t)dapl, (size_t)rdcc_nslots,
+            (size_t)rdcc_nbytes, (double) rdcc_w0);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_chunk_cache
+ * Signature: (I[J[J[D)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1chunk_1cache
+(JNIEnv *env, jclass clss, jint dapl, jlongArray rdcc_nslots, 
+        jlongArray rdcc_nbytes, jdoubleArray rdcc_w0)
+{
+    herr_t   status;
+    jint     mode;
+    jdouble *w0Array;
+    jlong   *rdcc_nslotsArray;
+    jlong   *nbytesArray;
+    jboolean isCopy;
+
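+    /* Any of the output arrays may be null; a null array simply means the
+     * caller does not want that value back. */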
+    if (rdcc_w0 == NULL) {
+        w0Array = (jdouble *)NULL;
+    }
+    else {
+        w0Array = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR rdcc_w0, &isCopy);
+        if (w0Array == NULL) {
+            h5JNIFatalError(env, "H5Pget_chunk_cache:  w0_array array not pinned");
+            return;
+        }
+    }
+
+    if (rdcc_nslots == NULL) {
+        rdcc_nslotsArray = (jlong *)NULL;
+    }
+    else {
+        rdcc_nslotsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nslots, &isCopy);
+        if (rdcc_nslotsArray == NULL) {
+            /* exception -- out of memory */
+            if (w0Array != NULL) {
+                ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+            }
+            h5JNIFatalError(env, "H5Pget_chunk_cache:  rdcc_nslots array not pinned");
+            return;
+        }
+    }
+
+    if (rdcc_nbytes == NULL) {
+        nbytesArray = (jlong *)NULL;
+    }
+    else {
+        nbytesArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nbytes, &isCopy);
+        if (nbytesArray == NULL) {
+            if (w0Array != NULL) {
+                ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+            }
+            if (rdcc_nslotsArray != NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nslots, rdcc_nslotsArray, JNI_ABORT);
+            }
+            h5JNIFatalError(env, "H5Pget_chunk_cache:  nbytesArray array not pinned");
+            return;
+        }
+    }
+    {
+        /* A direct (size_t *) cast of the jlong buffers fails on 32-bit
+         * platforms; also avoid dereferencing arrays passed as null. */
+        size_t rdcc_nslots_t = 0, nbytes_t = 0;
+        size_t *rdcc_nslots_p = NULL, *nbytes_p = NULL;
+
+        if (rdcc_nslotsArray != NULL) {
+            rdcc_nslots_t = (size_t)*rdcc_nslotsArray;
+            rdcc_nslots_p = &rdcc_nslots_t;
+        }
+        if (nbytesArray != NULL) {
+            nbytes_t = (size_t)*nbytesArray;
+            nbytes_p = &nbytes_t;
+        }
+
+        status = H5Pget_chunk_cache((hid_t)dapl, rdcc_nslots_p, nbytes_p, (double *)w0Array);
+
+        if (rdcc_nslotsArray != NULL)
+            *rdcc_nslotsArray = (jlong)rdcc_nslots_t;
+        if (nbytesArray != NULL)
+            *nbytesArray = (jlong)nbytes_t;
+    }
+
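+    /* On failure discard the native copies (JNI_ABORT); on success copy
+     * the values back into the Java arrays and free the native buffers. */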
+    if (status < 0) {
+        mode = JNI_ABORT;
+    }
+    else {
+        mode = 0; /* commit and free */
+    }
+
+    if (rdcc_nslotsArray != NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nslots, rdcc_nslotsArray, mode);
+    }
+
+    if (nbytesArray != NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nbytes, nbytesArray, mode);
+    }
+
+    if (w0Array != NULL) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, mode);
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_obj_track_times
+ * Signature: (I)Z  
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1obj_1track_1times
+  (JNIEnv *env, jclass clss, jint objplid) 
+{
+    herr_t   status;
+    hbool_t  track_times;
+
+    status = H5Pget_obj_track_times((hid_t)objplid, &track_times);
+
+    if (status < 0) {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+    
+    if (track_times == 1) {
+        return JNI_TRUE;
+    }
+    return JNI_FALSE;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_obj_track_times
+ * Signature: (IZ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1obj_1track_1times
+  (JNIEnv *env, jclass clss, jint objplid, jboolean track_times)
+{
+    herr_t   status;
+    hbool_t  track;
+
+    if (track_times == JNI_TRUE) {
+        track = 1;
+    }
+    else {
+        track = 0;
+    }
+
+    status = H5Pset_obj_track_times((hid_t)objplid, track);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    
+    return;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_char_encoding
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1char_1encoding
+  (JNIEnv *env, jclass clss, jint acpl)
+{
+    herr_t   status;
+    H5T_cset_t  encoding;
+
+    status = H5Pget_char_encoding((hid_t)acpl, &encoding);
+
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return encoding;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_char_encoding
+ * Signature: (II)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1char_1encoding
+  (JNIEnv *env, jclass clss, jint acpl, jint encoding)
+{
+    herr_t   status;
+
+    status = H5Pset_char_encoding((hid_t)acpl, (H5T_cset_t)encoding);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5pImp.h b/source/c/hdf-java/h5pImp.h
new file mode 100644
index 0000000..5e7a55e
--- /dev/null
+++ b/source/c/hdf-java/h5pImp.h
@@ -0,0 +1,1165 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5P */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5P
+#define _Included_ncsa_hdf_hdf5lib_H5_H5P
+#ifdef __cplusplus
+extern "C" {
+#endif
+    
+    extern JavaVM *jvm;
+    extern jobject visit_callback;   
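+    /* jvm and visit_callback are defined once in the JNI layer and shared
+     * across the translation units for callback dispatch back into Java. */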
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pall_filters_avail
+     * Signature: (I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pall_1filters_1avail
+      (JNIEnv *env, jclass clss, jint dcpl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pclose
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pclose
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pclose_class
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pclose_1class
+      (JNIEnv *env, jclass clss, jint plid);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pcopy
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pcopy
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pcopy_prop
+     * Signature: (IILjava/lang/String;)I 
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pcopy_1prop
+      (JNIEnv *env, jclass clss, jint dst_plid, jint src_plid, jstring name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pcreate
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pcreate
+      (JNIEnv *env, jclass clss, jint type);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pequal
+     * Signature: (II)I    
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pequal
+      (JNIEnv *env, jclass clss, jint plid1, jint plid2);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pexist
+     * Signature: (ILjava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pexist
+      (JNIEnv *env, jclass clss, jint plid, jstring name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pfill_value_defined
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pfill_1value_1defined
+      (JNIEnv *env, jclass clss, jint plist, jintArray status);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget
+     * Signature: (ILjava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget
+      (JNIEnv *env, jclass clss, jint plid, jstring name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_alignment
+     * Signature: (I[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1alignment
+      (JNIEnv *env, jclass clss, jint plist, jlongArray alignment);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_alloc_time
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1alloc_1time
+      (JNIEnv *env, jclass clss, jint plist, jintArray alloc_time);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_attr_creation_order
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1attr_1creation_1order
+      (JNIEnv *env, jclass clss, jint ocpl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_attr_phase_change
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1attr_1phase_1change
+      (JNIEnv *env, jclass clss, jint ocpl_id, jintArray attributes);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_btree_ratios
+     * Signature: (I[D[D[D)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1btree_1ratios
+      (JNIEnv *env, jclass clss, jint plist_id, jdoubleArray left, jdoubleArray middle, jdoubleArray right);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_buffer
+     * Signature: (I[B[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1buffer
+      (JNIEnv *env, jclass clss, jint plist, jbyteArray tconv, jbyteArray bkg);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_buffer_size
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1buffer_1size
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_cache
+     * Signature: (I[I[J[J[D)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1cache
+      (JNIEnv *env, jclass clss, jint plist, jintArray mdc_nelmts,
+      jlongArray rdcc_nelmts, jlongArray rdcc_nbytes, jdoubleArray rdcc_w0);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_char_encoding
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1char_1encoding
+      (JNIEnv *env, jclass clss, jint acpl);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_chunk
+     * Signature: (II[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1chunk
+      (JNIEnv *env, jclass clss, jint plist, jint max_ndims, jlongArray dims);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_chunk_cache
+     * Signature: (I[J[J[D)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1chunk_1cache
+      (JNIEnv *env, jclass clss, jint dapl, jlongArray rdcc_nslots, 
+              jlongArray rdcc_nbytes, jdoubleArray rdcc_w0);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_class
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1class
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_class_name
+     * Signature: (I)Ljava/lang/String; 
+     */
+    JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1class_1name
+      (JNIEnv *env, jclass clss, jint plid);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_class_parent
+     * Signature: (I)I   
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1class_1parent
+      (JNIEnv *env, jclass clss, jint plid);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_copy_object
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1copy_1object
+      (JNIEnv *env, jclass clss, jint ocp_plist_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_create_intermediate_group
+     * Signature: (I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1create_1intermediate_1group
+      (JNIEnv *env, jclass clss, jint lcpl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_data_transform
+     * Signature: (I[Ljava/lang/String;J)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1data_1transform
+      (JNIEnv *env, jclass clss, jint plist_id, jobjectArray expression, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_driver
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1driver
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_edc_check
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1edc_1check
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_elink_acc_flags
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1elink_1acc_1flags
+      (JNIEnv *env, jclass clss, jint lapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    _H5Pget_elink_fapl
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Pget_1elink_1fapl
+      (JNIEnv *env, jclass clss, jint lapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_elink_file_cache_size
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1elink_1file_1cache_1size
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_elink_prefix
+     * Signature: (I[Ljava/lang/String;)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1elink_1prefix
+      (JNIEnv *env, jclass clss, jint lapl_id, jobjectArray prefix);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_est_link_info
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1est_1link_1info
+      (JNIEnv *env, jclass clss, jint gcpl_id, jintArray link_info);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_external
+     * Signature: (IIJ[Ljava/lang/String;[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1external
+      (JNIEnv *env, jclass clss, jint plist, jint idx, jlong name_size,
+      jobjectArray name, jlongArray size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_external_count
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1external_1count
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_family_offset
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1family_1offset
+      (JNIEnv *env, jclass clss, jint fapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fapl_core
+     * Signature: (I[J[Z)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1core
+      (JNIEnv *env, jclass clss, jint fapl_id, jlongArray increment, jbooleanArray backing_store);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fapl_direct
+     * Signature: (I[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1direct
+     (JNIEnv *env, jclass clss, jint fapl_id, jlongArray info);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fapl_family
+     * Signature: (I[J[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1family
+      (JNIEnv *env, jclass clss, jint tid, jlongArray memb_size, jintArray memb_plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fapl_multi
+     * Signature: (I[I[I[Ljava/lang/String;[J)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fapl_1multi
+      (JNIEnv *env, jclass clss, jint tid, jintArray memb_map, jintArray memb_fapl, jobjectArray memb_name, jlongArray memb_addr);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fclose_degree
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fclose_1degree
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fill_time
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fill_1time
+      (JNIEnv *env, jclass clss, jint plist, jintArray fill_time);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_fill_value
+     * Signature: (II[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1fill_1value
+      (JNIEnv *env, jclass clss, jint plist_id, jint type_id, jbyteArray value);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_filter
+     * Signature: (II[I[J[IJ[Ljava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter
+      (JNIEnv *env, jclass clss, jint plist, jint filter_number, jintArray flags,
+      jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_filter2
+     * Signature: (II[I[J[IJ[Ljava/lang/String;[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter2
+      (JNIEnv *env, jclass clss, jint plist, jint filter_number, jintArray flags,
+      jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name, 
+      jintArray filter_config);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_filter_by_id
+     * Signature: (II[I[J[IJ[Ljava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id
+      (JNIEnv *env, jclass clss, jint plist, jint filter, jintArray flags,
+      jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_filter_by_id2
+     * Signature: (II[I[J[IJ[Ljava/lang/String;[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id2
+      (JNIEnv *env, jclass clss, jint plist, jint filter, jintArray flags,
+      jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name, jintArray filter_config);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_gc_references
+     * Signature: (I[Z)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1gc_1references
+      (JNIEnv *env, jclass clss, jint fapl_id, jbooleanArray gc_ref);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_gcreferences
+     * Signature: (I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1gcreferences
+      (JNIEnv *env, jclass clss, jint fapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_hyper_vector_size
+     * Signature: (I[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1hyper_1vector_1size
+      (JNIEnv *env, jclass clss, jint plist, jlongArray vector_size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_istore_k
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1istore_1k
+      (JNIEnv *env, jclass clss, jint plist, jintArray ik);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_layout
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1layout
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_libver_bounds
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1libver_1bounds
+      (JNIEnv *env, jclass clss, jint fapl_id, jintArray libver);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_link_creation_order
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1link_1creation_1order
+      (JNIEnv *env, jclass clss, jint gcpl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_link_phase_change
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1link_1phase_1change
+      (JNIEnv *env, jclass clss, jint gcpl_id, jintArray links);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_local_heap_size_hint
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1local_1heap_1size_1hint
+      (JNIEnv *env, jclass clss, jint gcpl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_mdc_config
+     * Signature: (I)Lncsa/hdf/hdf5lib/structs/H5AC_cache_config_t;
+     */
+    JNIEXPORT jobject JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1mdc_1config
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_meta_block_size
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1meta_1block_1size
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_nfilters
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1nfilters
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_nlinks
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1nlinks
+      (JNIEnv *env, jclass clss, jint lapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_nprops
+     * Signature: (I)J  
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1nprops
+      (JNIEnv *env, jclass clss, jint plid);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_obj_track_times
+     * Signature: (I)Z  
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1obj_1track_1times
+      (JNIEnv *env, jclass clss, jint objplid);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_preserve
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1preserve
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_sieve_buf_size
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1sieve_1buf_1size
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_shared_mesg_index
+     * Signature: (II[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1index
+      (JNIEnv *env, jclass clss, jint fcpl_id, jint index_num, jintArray mesg_info);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_shared_mesg_nindexes
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1nindexes
+      (JNIEnv *env, jclass clss, jint fcpl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_shared_mesg_phase_change
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1phase_1change
+      (JNIEnv *env, jclass clss, jint fcpl_id, jintArray size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_size
+     * Signature: (ILjava/lang/String;)J 
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1size
+      (JNIEnv *env, jclass clss, jint plid, jstring name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_sizes
+     * Signature: (I[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1sizes
+      (JNIEnv *env, jclass clss, jint plist, jlongArray size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_small_data_block_size
+     * Signature: (I[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size
+      (JNIEnv *env, jclass clss, jint plist, jlongArray size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_small_data_block_size_long
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size_1long
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_sym_k
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1sym_1k
+      (JNIEnv *env, jclass clss, jint plist, jintArray size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_userblock
+     * Signature: (I[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1userblock
+      (JNIEnv *env, jclass clss, jint plist, jlongArray size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pget_version
+     * Signature: (I[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pget_1version
+      (JNIEnv *env, jclass clss, jint plist, jintArray version_info);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pisa_class
+     * Signature: (II)I    
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pisa_1class
+      (JNIEnv *env, jclass clss, jint plid, jint pcls);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pmodify_filter
+     * Signature: (IIIJ[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pmodify_1filter
+      (JNIEnv *env, jclass clss, jint plist, jint filter, jint flags,
+      jlong cd_nelmts, jintArray cd_values);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Premove
+     * Signature: (ILjava/lang/String;)I 
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Premove
+      (JNIEnv *env, jclass clss, jint plid, jstring name);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Premove_filter
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5P1remove_1filter
+      (JNIEnv *env, jclass clss, jint obj_id, jint filter);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset
+     * Signature: (ILjava/lang/String;I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset
+      (JNIEnv *env, jclass clss, jint plid, jstring name, jint val);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_alignment
+     * Signature: (IJJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1alignment
+      (JNIEnv *env, jclass clss, jint plist, jlong threshold, jlong alignment);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_alloc_time
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1alloc_1time
+      (JNIEnv *env, jclass clss, jint plist, jint alloc_time);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_attr_creation_order
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1attr_1creation_1order
+      (JNIEnv *env, jclass clss, jint ocpl_id, jint crt_order_flags);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_btree_ratios
+     * Signature: (IDDD)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1btree_1ratios
+      (JNIEnv *env, jclass clss, jint plist_id, jdouble left, jdouble middle, jdouble right);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_buffer
+     * Signature: (IJ[B[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1buffer
+      (JNIEnv *env, jclass clss, jint plist, jlong size, jbyteArray tconv, jbyteArray bkg);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_buffer_size
+     * Signature: (IJ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1buffer_1size
+      (JNIEnv *env, jclass clss, jint plist, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_cache
+     * Signature: (IIJJD)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1cache
+      (JNIEnv *env, jclass clss, jint plist, jint mdc_nelmts, jlong rdcc_nelmts,
+      jlong rdcc_nbytes, jdouble rdcc_w0);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_char_encoding
+     * Signature: (II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1char_1encoding
+      (JNIEnv *env, jclass clss, jint acpl, jint encoding);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_chunk
+     * Signature: (II[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1chunk
+      (JNIEnv *env, jclass clss, jint plist, jint ndims, jbyteArray dim);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_chunk_cache
+     * Signature: (IJJD)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1chunk_1cache
+      (JNIEnv *env, jclass clss, jint dapl, jlong rdcc_nslots,
+      jlong rdcc_nbytes, jdouble rdcc_w0);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_copy_object
+     * Signature: (II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1copy_1object
+      (JNIEnv *env, jclass clss, jint ocp_plist_id, jint copy_options);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_create_intermediate_group
+     * Signature: (IZ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1create_1intermediate_1group
+      (JNIEnv *env, jclass clss, jint lcpl_id, jboolean crt_intermed_group);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_data_transform
+     * Signature: (ILjava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1data_1transform
+      (JNIEnv *env, jclass clss, jint plist_id, jstring expression);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_deflate
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1deflate
+      (JNIEnv *env, jclass clss, jint plist, jint level);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_edc_check
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1edc_1check
+      (JNIEnv *env, jclass clss, jint plist, jint check);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_elink_acc_flags
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1acc_1flags
+      (JNIEnv *env, jclass clss, jint lapl_id, jint flags);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_elink_fapl
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1fapl
+      (JNIEnv *env, jclass clss, jint lapl_id, jint fapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_elink_file_cache_size
+     * Signature: (II)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1file_1cache_1size
+      (JNIEnv *env, jclass clss, jint plist, jint size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_elink_prefix
+     * Signature: (ILjava/lang/String;)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1elink_1prefix
+      (JNIEnv *env, jclass clss, jint lapl_id, jstring prefix);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_est_link_info
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1est_1link_1info
+      (JNIEnv *env, jclass clss, jint gcpl_id, jint est_num_entries, jint est_name_len);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_external
+     * Signature: (ILjava/lang/String;JJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1external
+      (JNIEnv *env, jclass clss, jint plist, jstring name, jlong offset, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_family_offset
+     * Signature: (IJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1family_1offset
+      (JNIEnv *env, jclass clss, jint fapl_id, jlong offset);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_core
+     * Signature: (IJZ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1core
+      (JNIEnv *env, jclass clss, jint fapl_id, jlong increment, jboolean backing_store);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_direct
+     * Signature: (IJJJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1direct
+     (JNIEnv *env, jclass clss, jint fapl_id, jlong alignment, jlong block_size, jlong cbuf_size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_family
+     * Signature: (IJI)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1family
+      (JNIEnv *env, jclass clss, jint plist, jlong memb_size, jint memb_plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_log
+     * Signature: (ILjava/lang/String;JJ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1log
+      (JNIEnv *env, jclass clss, jint fapl_id, jstring logfile, jlong flags, jlong buf_size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_multi
+     * Signature: (I[I[I[Ljava/lang/String;[JZ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1multi
+      (JNIEnv *env, jclass clss, jint tid, jintArray memb_map, jintArray memb_fapl, jobjectArray memb_name, jlongArray memb_addr, jboolean relax);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_sec2
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1sec2
+      (JNIEnv *env, jclass clss, jint fapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_split
+     * Signature: (ILjava/lang/String;ILjava/lang/String;I)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1split
+      (JNIEnv *env, jclass clss, jint fapl_id, jstring metaext, jint meta_pl_id, jstring rawext, jint raw_pl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_stdio
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1stdio
+      (JNIEnv *env, jclass clss, jint fapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fapl_windows
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fapl_1windows
+      (JNIEnv *env, jclass clss, jint fapl_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fclose_degree
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fclose_1degree
+      (JNIEnv *env, jclass clss, jint plist, jint fc_degree);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fill_time
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fill_1time
+      (JNIEnv *env, jclass clss, jint plist, jint fill_time);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fill_value
+     * Signature: (II[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fill_1value
+      (JNIEnv *env, jclass clss, jint plist_id, jint type_id, jbyteArray value);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_filter
+     * Signature: (IIIJ[I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1filter
+      (JNIEnv *env, jclass clss, jint plist, jint filter, jint flags,
+      jlong cd_nelmts, jintArray cd_values);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_fletcher32
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1fletcher32
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_gc_references
+     * Signature: (IZ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1gc_1references
+      (JNIEnv *env, jclass clss, jint fapl_id, jboolean gc_ref);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_hyper_vector_size
+     * Signature: (IJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1hyper_1vector_1size
+      (JNIEnv *env, jclass clss, jint plist, jlong vector_size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_istore_k
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1istore_1k
+      (JNIEnv *env, jclass clss, jint plist, jint ik);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_layout
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1layout
+      (JNIEnv *env, jclass clss, jint plist, jint layout);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_libver_bounds
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1libver_1bounds
+      (JNIEnv *env, jclass clss, jint fapl_id, jint low, jint high);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_link_creation_order
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1link_1creation_1order
+      (JNIEnv *env, jclass clss, jint gcpl_id, jint crt_order_flags);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_link_phase_change
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1link_1phase_1change
+      (JNIEnv *env, jclass clss, jint gcpl_id, jint max_compact, jint min_dense);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_local_heap_size_hint
+     * Signature: (IJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1local_1heap_1size_1hint
+      (JNIEnv *env, jclass clss, jint gcpl_id, jlong size_hint);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_mdc_config
+     * Signature: (ILncsa/hdf/hdf5lib/structs/H5AC_cache_config_t;)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1mdc_1config
+      (JNIEnv *env, jclass clss, jint plist, jobject cache_config);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_meta_block_size
+     * Signature: (IJ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1meta_1block_1size
+      (JNIEnv *env, jclass clss, jint fapl_id, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_nbit
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1nbit
+      (JNIEnv *env, jclass clss, jint plist_id);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_nlinks
+     * Signature: (IJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1nlinks
+      (JNIEnv *env, jclass clss, jint lapl_id, jlong nlinks);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_obj_track_times
+     * Signature: (IZ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1obj_1track_1times
+      (JNIEnv *env, jclass clss, jint objplid, jboolean track_times);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_preserve
+     * Signature: (IZ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1preserve
+      (JNIEnv *env, jclass clss, jint plist, jboolean status);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_scaleoffset
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1scaleoffset
+      (JNIEnv *env, jclass clss, jint plist_id, jint scale_type, jint scale_factor);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_sieve_buf_size
+     * Signature: (IJ)V
+     */
+    JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1sieve_1buf_1size
+      (JNIEnv *env, jclass clss, jint fapl_id, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_shared_mesg_index
+     * Signature: (IIII)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1index
+      (JNIEnv *env, jclass clss, jint fcpl_id, jint index_num, jint mesg_type_flags, jint min_mesg_size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_shared_mesg_nindexes
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1nindexes
+      (JNIEnv *env, jclass clss, jint plist_id, jint nindexes);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_shared_mesg_phase_change
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1phase_1change
+      (JNIEnv *env, jclass clss, jint fcpl_id, jint max_list, jint min_btree);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_shuffle
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1shuffle
+      (JNIEnv *env, jclass clss, jint plist);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_sizes
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1sizes
+      (JNIEnv *env, jclass clss, jint plist, jint sizeof_addr, jint sizeof_size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_small_data_block_size
+     * Signature: (IJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1small_1data_1block_1size
+      (JNIEnv *env, jclass clss, jint plist, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_sym_k
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1sym_1k
+      (JNIEnv *env, jclass clss, jint plist, jint ik, jint lk);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_szip
+     * Signature: (III)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1szip
+      (JNIEnv *env, jclass clss, jint plist, jint options_mask, jint pixels_per_block);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Pset_userblock
+     * Signature: (IJ)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Pset_1userblock
+      (JNIEnv *env, jclass clss, jint plist, jlong size);
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Punregister
+     * Signature: (ILjava/lang/String;)I  
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Punregister
+      (JNIEnv *env, jclass clss, jint plid, jstring name);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5rImp.c b/source/c/hdf-java/h5rImp.c
new file mode 100755
index 0000000..7bedf6e
--- /dev/null
+++ b/source/c/hdf-java/h5rImp.c
@@ -0,0 +1,338 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Reference API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
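+
+/*
+ *  A minimal sketch of the wrapper pattern used throughout this file
+ *  (illustrative only; "arr", "loc_id" and "name" are assumed locals,
+ *  not part of this API): pin the Java array, call the HDF5 routine,
+ *  release the pin, and raise a Java exception on failure.
+ *
+ *      jbyte *buf = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR arr, NULL);
+ *      if (buf == NULL) {
+ *          h5JNIFatalError(env, "example:  array not pinned");
+ *          return -1;
+ *      }
+ *      herr_t status = H5Rcreate(buf, loc_id, name, H5R_OBJECT, -1);
+ *      ENVPTR->ReleaseByteArrayElements(ENVPAR arr, buf,
+ *                                       (status < 0) ? JNI_ABORT : 0);
+ *      if (status < 0)
+ *          h5libraryError(env);
+ */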
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include "h5jni.h"
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rcreate
+ * Signature: ([BILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Rcreate
+  (JNIEnv *env, jclass clss,
+  jbyteArray ref, jint loc_id, jstring name, jint ref_type, jint space_id)
+{
+    char* rName;
+    jboolean isCopy;
+    herr_t status;
+    jbyte *refP;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rcreate:  ref is NULL");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument( env, "H5Rcreate:  name is NULL");
+        return -1;
+    }
+    if (ref_type == H5R_OBJECT) {
+        if (ENVPTR->GetArrayLength(ENVPAR ref) != H5R_OBJ_REF_BUF_SIZE) {
+            h5badArgument( env, "H5Rcreate:  ref input array != H5R_OBJ_REF_BUF_SIZE");
+            return -1;
+        }
+    }
+    else if (ref_type == H5R_DATASET_REGION) {
+        if (ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+            h5badArgument( env, "H5Rcreate:  region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+            return -1;
+        }
+    }
+    else {
+        h5badArgument( env, "H5Rcreate:  ref_type unknown type ");
+        return -1;
+    }
+
+    refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rcreate:  ref not pinned");
+        return -1;
+    }
+    rName = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (rName == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR ref,refP,JNI_ABORT);
+        h5JNIFatalError(env,  "H5Rcreate:  name not pinned");
+        return -1;
+    }
+
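+    /* On success, release mode 0 copies the reference bytes written by
+     * H5Rcreate back into the Java array; on failure, JNI_ABORT discards
+     * them. */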
+    status = H5Rcreate(refP, loc_id, rName, (H5R_type_t)ref_type, space_id);
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, rName);
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+        h5libraryError(env);
+    }
+    else {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, 0);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rdereference
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Rdereference
+  (JNIEnv *env, jclass clss, jint dataset, jint ref_type,
+  jbyteArray ref )
+{
+    jboolean isCopy;
+    jbyte *refP;
+    hid_t status;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rdereference:  ref is NULL");
+        return -1;
+    }
+    if ((ref_type == H5R_OBJECT) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_OBJ_REF_BUF_SIZE) {
+        h5badArgument( env, "H5Rdereference:  obj ref input array != H5R_OBJ_REF_BUF_SIZE");
+        return -1;
+    }
+    else if ((ref_type == H5R_DATASET_REGION)
+        && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+        h5badArgument( env, "H5Rdereference:  region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+        return -1;
+    }
+    refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rdereference:  ref not pinned");
+        return -1;
+    }
+
+    status = H5Rdereference((hid_t)dataset, (H5R_type_t)ref_type, refP);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rget_region
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Rget_1region
+  (JNIEnv *env, jclass clss, jint dataset, jint ref_type,
+  jbyteArray ref )
+{
+    hid_t status;
+    jboolean isCopy;
+    jbyte *refP;
+
+    if (ref_type != H5R_DATASET_REGION)  {
+        h5badArgument( env, "H5Rget_region:  bad ref_type ");
+        return -1;
+    }
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_region:  ref is NULL");
+        return -1;
+    }
+    if ( ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+        h5badArgument( env, "H5Rget_region:  region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+        return -1;
+    }
+    refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_region:  ref not pinned");
+        return -1;
+    }
+
+    status = H5Rget_region((hid_t)dataset, (H5R_type_t)ref_type, refP);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rget_obj_type
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Rget_1obj_1type
+  (JNIEnv *env, jclass clss, jint loc_id, jint ref_type, jbyteArray ref)
+{
+    int retVal =-1;
+    jboolean isCopy;
+    jbyte *refP;
+    H5O_type_t object_info;
+
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_object_type:  ref is NULL");
+        return -1;
+    }
+
+    refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_object_type:  ref not pinned");
+        return -1;
+    }
+
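+    /* Implemented via H5Rget_obj_type2: on success, return the H5O_type_t
+     * written to object_info rather than the herr_t status. */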
+    retVal = H5Rget_obj_type2((hid_t)loc_id, (H5R_type_t)ref_type, refP, &object_info);
+    if(retVal >= 0)
+        retVal = object_info;
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rget_obj_type2
+ * Signature: (II[B[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Rget_1obj_1type2
+  (JNIEnv *env, jclass clss, jint loc_id, jint ref_type, jbyteArray ref, jintArray ref_obj)
+{
+
+    jint status;
+    jboolean isCopy;
+    jbyte *refP;
+    jint *ref_objP;
+    int retVal;
+
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_object_type:  ref is NULL");
+        return -1;
+    }
+    if (ref_obj == NULL) {
+        h5nullArgument( env, "H5Rget_object_type:  ref_obj is NULL");
+        return -1;
+    }
+
+    refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_object_type:  ref not pinned");
+        return -1;
+    }
+    ref_objP = (jint *)ENVPTR->GetIntArrayElements(ENVPAR ref_obj, &isCopy);
+    if (ref_objP == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+        h5JNIFatalError(env,  "H5Rget_object_type:  ref_obj not pinned");
+        return -1;
+    }
+
+    status = H5Rget_obj_type2((hid_t)loc_id, (H5R_type_t)ref_type, refP, (H5O_type_t*)ref_objP);
+    retVal = ref_objP[0];
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR ref_obj,ref_objP, JNI_ABORT);
+        h5libraryError(env);
+    } 
+    else {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR ref_obj, ref_objP, 0);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rget_name
+ * Signature: (II[B[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Rget_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jint ref_type, jbyteArray ref, jobjectArray name, jlong size)
+{
+    jlong ret_val = -1;
+    jbyte *refP;
+    jboolean isCopy;
+    char *aName=NULL;
+    jstring str;
+    size_t bs;
+
+    if (size <= 0) {
+        h5badArgument( env, "H5Rget_name:  size <= 0");
+        return -1;
+    }
+    bs = (size_t)size;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_name:  ref is NULL");
+        return -1;
+    }
+
+    if ((ref_type == H5R_OBJECT) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_OBJ_REF_BUF_SIZE) {
+        h5badArgument( env, "H5Rget_name:  obj ref input array != H5R_OBJ_REF_BUF_SIZE");
+        return -1;
+    } 
+    else if ((ref_type == H5R_DATASET_REGION)
+            && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+        h5badArgument( env, "H5Rget_name:  region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+        return -1;
+    }
+
+    refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_name:  ref not pinned");
+        return -1;
+    }
+
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+        h5outOfMemory( env, "H5Rget_name:  malloc failed");
+        return -1;
+    }
+
+    ret_val = (jlong) H5Rget_name( (hid_t)loc_id, (H5R_type_t) ref_type, refP, aName, bs) ;
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+    if (ret_val < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;
+    }
+
+    str = ENVPTR->NewStringUTF(ENVPAR aName);
+    ENVPTR->SetObjectArrayElement(ENVPAR name, 0, str);
+
+    if (aName) free (aName);
+
+    return ret_val;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5sImp.c b/source/c/hdf-java/h5sImp.c
new file mode 100755
index 0000000..2684074
--- /dev/null
+++ b/source/c/hdf-java/h5sImp.c
@@ -0,0 +1,1238 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Dataspace Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://www.hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5sImp.h"
+
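+    /*
+     * Editorial sketch, not part of the upstream source: every wrapper below
+     * open-codes the same idiom for widening a pinned jlong array into a
+     * freshly malloc'd hsize_t array.  The helper name is illustrative only;
+     * the wrappers keep their inline loops.  The caller owns the returned
+     * buffer and must free() it; NULL signals an allocation failure.
+     */
+    static hsize_t *sketch_jlongs_to_hsize(const jlong *src, int rank)
+    {
+        hsize_t *dst = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        int i;
+
+        if (dst != NULL) {
+            for (i = 0; i < rank; i++)
+                dst[i] = (hsize_t)src[i];   /* may narrow if hsize_t is 32-bit */
+        }
+        return dst;
+    }
+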
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Screate
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Screate(JNIEnv *env,
+            jclass clss, jint type) {
+        hid_t retVal = -1;
+        
+        retVal = H5Screate((H5S_class_t) type);
+        
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Screate_simple
+     * Signature: (I[J[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Screate_1simple(
+            JNIEnv *env, jclass clss, jint rank, jlongArray dims,
+            jlongArray maxdims) {
+        hid_t status;
+        jlong *dimsP, *maxdimsP;
+        jboolean isCopy;
+        hsize_t *sa = NULL;
+        hsize_t *msa = NULL;
+        int i;
+        int drank, mrank;
+        hsize_t *lp;
+        jlong *jlp;
+
+        if (rank < 0) {
+            h5badArgument(env, "H5Screate_simple:  rank is invalid");
+            return -1;
+        }
+        if (dims == NULL) {
+            h5nullArgument(env, "H5Screate_simple:  dims is NULL");
+            return -1;
+        }
+        drank = (int) ENVPTR->GetArrayLength(ENVPAR dims);
+        if (drank != rank) {
+            h5badArgument(env, "H5Screate_simple:  dims rank is invalid");
+            return -1;
+        }
+        if(maxdims != NULL) {
+            mrank = (int) ENVPTR->GetArrayLength(ENVPAR maxdims);
+            if (mrank != rank) {
+                h5badArgument(env, "H5Screate_simple:  maxdims rank is invalid");
+                return -1;
+            }
+        }
+        dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+        if (dimsP == NULL) {
+            h5JNIFatalError(env, "H5Screate_simple:  dims not pinned");
+            return -1;
+        }
+
+        sa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (sa == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+            h5JNIFatalError(env, "H5Screate_simple:  dims not converted to hsize_t");
+            return -1;
+        }
+
+        jlp = (jlong *)dimsP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+
+        if (maxdims == NULL) {
+            maxdimsP = NULL;
+            msa = (hsize_t *)maxdimsP;
+        } 
+        else {
+            maxdimsP = ENVPTR->GetLongArrayElements(ENVPAR maxdims, &isCopy);
+            if (maxdimsP == NULL)  {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+                free (sa);
+                h5JNIFatalError(env, "H5Screate_simple:  maxdims not pinned");
+                return -1;
+            }
+            msa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+            if (msa == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+                free (sa);
+                h5JNIFatalError(env, "H5Screate_simple:  dims not converted to hsize_t");
+                return -1;
+            }
+            jlp = (jlong *)maxdimsP;
+            for (i = 0; i < mrank; i++) {
+                *lp = (hsize_t)*jlp;
+                lp++;
+                jlp++;
+            }
+        }
+
+        status = H5Screate_simple(rank, (const hsize_t *)sa, (const hsize_t *)msa);
+        
+        if (maxdimsP != NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+            if (msa) 
+                free (msa);
+        }
+
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+        if (sa) 
+            free (sa);
+
+        if (status < 0)
+            h5libraryError(env);
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Scopy
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Scopy(JNIEnv *env,
+            jclass clss, jint space_id) {
+        hid_t retVal = -1;
+        
+        retVal = H5Scopy(space_id);
+        
+        if (retVal < 0)
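+        /* Two-pass idiom: a NULL buffer makes H5Sencode report the required
+         * allocation size in buf_size without serializing anything. */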
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+#ifdef notdef
+    // 10/28/99 -- added code to copy the array -- this is not used,
+    // but serves as a reminder in case we try to implement this in
+    // the future....
+    /*
+     *  Note:  the argument coord is actually long coord[][], which has been
+     *         flattened by the caller.
+     */
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sselect_elements
+     * Signature: (III[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1elements
+    (JNIEnv *env, jclass clss, jint space_id, jint op, jint num_elemn, jlongArray coord)
+    {
+        herr_t status;
+        jint i;
+        jlong *P;
+        jboolean isCopy;
+        hssize_t *sa;
+        int rank;
+
+        if (coord == NULL) {
+            h5nullArgument( env, "H5Sselect_elements:  coord is NULL");
+            return -1;
+        }
+
+        P = ENVPTR->GetLongArrayElements(ENVPAR coord, &isCopy);
+        if (P == NULL) {
+            h5JNIFatalError(env, "H5Sselect_elements:  coord not pinned");
+            return -1;
+        }
+        sa = (hssize_t *)malloc( num_elemn * 2 * sizeof(hssize_t));
+        if (sa == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR coord, P, JNI_ABORT);
+            h5JNIFatalError(env, "H5Sselect_elements:  coord array not converted to hssize_t");
+            return -1;
+        }
+        for (i= 0; i < (num_elemn * 2); i++) {
+            sa[i] = P[i];
+        }
+
+        status = H5Sselect_elements (space_id, (H5S_seloper_t)op, num_elemn, (const hssize_t **)&sa);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR coord, P, JNI_ABORT);
+        free(sa);
+
+        if (status < 0)
+            h5libraryError(env);
+
+
+        return (jint)status;
+    }
+#endif
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sselect_elements
+     * Signature: (III[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1elements(
+            JNIEnv *env, jclass clss, jint space_id, jint op, jint num_elemn,
+            jbyteArray coord) {
+        int ii;
+        hsize_t *lp = NULL;
+        hsize_t *llp;
+        jlong *jlp;
+        herr_t status;
+        jbyte *P;
+        jboolean isCopy;
+        jsize size;
+        int nlongs;
+
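+        /* coord arrives as the caller-flattened byte image of a Java
+         * long[][]; reinterpret the pinned bytes as jlongs and widen each
+         * coordinate to hsize_t. */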
+        if (coord == NULL) {
+            h5nullArgument(env, "H5Sselect_elements:  coord is NULL");
+            return -1;
+        }
+
+        P = ENVPTR->GetByteArrayElements(ENVPAR coord, &isCopy);
+        if (P == NULL) {
+            h5JNIFatalError(env, "H5Sselect_elements:  coord not pinned");
+            return -1;
+        }
+        size = (int) ENVPTR->GetArrayLength(ENVPAR coord);
+        nlongs = size / sizeof(jlong);
+        lp = (hsize_t *)malloc(nlongs * sizeof(hsize_t));
+        if (lp == NULL) {
+            ENVPTR->ReleaseByteArrayElements(ENVPAR coord, P, JNI_ABORT);
+            h5JNIFatalError(env, "H5Sselect_elements:  coord not converted to hsize_t");
+            return -1;
+        }
+        jlp = (jlong *)P;
+        llp = lp;
+        for (ii = 0; ii < nlongs; ii++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+
+        status = H5Sselect_elements (space_id, (H5S_seloper_t)op, num_elemn, (const hsize_t *)llp);
+
+        ENVPTR->ReleaseByteArrayElements(ENVPAR coord, P, JNI_ABORT);
+
+        if (llp) free (llp);
+
+        if (status < 0)
+            h5libraryError(env);
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sselect_all
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1all(JNIEnv *env,
+            jclass clss, jint space_id) {
+        herr_t retVal = -1;
+        
+        retVal = H5Sselect_all(space_id);
+        
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sselect_none
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1none(JNIEnv *env,
+            jclass clss, jint space_id) {
+        herr_t retVal = -1;
+        
+        retVal = H5Sselect_none(space_id);
+        
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sselect_valid
+     * Signature: (I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1valid(
+            JNIEnv *env, jclass clss, jint space_id) {
+        htri_t bval;
+        bval = H5Sselect_valid(space_id);
+        if (bval > 0) {
+            return JNI_TRUE;
+        }
+        else if (bval == 0) {
+            return JNI_FALSE;
+        }
+        else {
+            h5libraryError(env);
+            return JNI_FALSE;
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_simple_extent_npoints
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1npoints(
+            JNIEnv *env, jclass clss, jint space_id) {
+        hssize_t retVal = -1;
+        
+        retVal = H5Sget_simple_extent_npoints(space_id);
+        
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jlong) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_npoints
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1npoints(
+            JNIEnv *env, jclass clss, jint space_id) {
+        hssize_t retVal = -1;
+        
+        retVal = H5Sget_select_npoints(space_id);
+        
+        if (retVal < 0) 
+            h5libraryError(env);
+
+        return (jlong) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_type
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1type(
+            JNIEnv *env, jclass clss, jint space_id) {
+        int retVal = -1;
+        
+        retVal = H5Sget_select_type(space_id);
+        
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_simple_extent_ndims
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1ndims(
+            JNIEnv *env, jclass clss, jint space_id) {
+        int retVal = -1;
+        
+        retVal = H5Sget_simple_extent_ndims(space_id);
+        
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_simple_extent_dims
+     * Signature: (I[J[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1dims(
+            JNIEnv *env, jclass clss, jint space_id, jlongArray dims,
+            jlongArray maxdims) {
+        int status;
+        jlong *dimsP, *maxdimsP;
+        jboolean isCopy;
+        hsize_t *sa;
+        hsize_t *msa;
+        int i;
+        int rank = -1;
+        int mrank;
+
+        if (dims == NULL) {
+            dimsP = NULL;
+            sa = (hsize_t *)dimsP;
+        } 
+        else {
+            dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+            if (dimsP == NULL) {
+                h5JNIFatalError(env, "H5Sget_simple_extent_dims:  dims not pinned");
+                return -1;
+            }
+            rank = (int) ENVPTR->GetArrayLength(ENVPAR dims);
+            sa = (hsize_t *)malloc( rank * sizeof(hsize_t));
+            if (sa == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                h5JNIFatalError(env,"H5Sget_simple_extent_dims:  dims not converted to hsize_t");
+                return -1;
+            }
+        }
+        if (maxdims == NULL) {
+            maxdimsP = NULL;
+            msa = (hsize_t *)maxdimsP;
+        } 
+        else {
+            maxdimsP = ENVPTR->GetLongArrayElements(ENVPAR maxdims,&isCopy);
+            if (maxdimsP == NULL) {
+                if (dimsP != NULL)  {
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                    free(sa);
+                }
+                h5JNIFatalError(env,  "H5Sget_simple_extent_dims:  maxdims not pinned");
+                return -1;
+            }
+            mrank = (int) ENVPTR->GetArrayLength(ENVPAR maxdims);
+            if(rank < 0)
+                rank = mrank;
+            else if(mrank != rank) {
+                if (dimsP != NULL)  {
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                    free(sa);
+                }
+                ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims,maxdimsP,JNI_ABORT);
+                h5JNIFatalError(env,  "H5Sget_simple_extent_dims:  maxdims rank not same as dims");
+                return -1;
+            }
+            msa = (hsize_t *)malloc( rank * sizeof(hsize_t));
+            if (msa == NULL)  {
+                if (dimsP != NULL)  {
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                    free(sa);
+                }
+                ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims,maxdimsP,JNI_ABORT);
+                h5JNIFatalError(env,  "H5Sget_simple_extent_dims:  maxdims not converted to hsize_t");
+                return -1;
+            }
+        }
+
+        status = H5Sget_simple_extent_dims(space_id, (hsize_t *)sa, (hsize_t *)msa);
+
+        if (status < 0) {
+            if (dimsP != NULL)  {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                free(sa);
+            }
+            if (maxdimsP != NULL)  {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims,maxdimsP,JNI_ABORT);
+                free(msa);
+            }
+            h5libraryError(env);
+            return -1;
+        } 
+
+        if (dimsP != NULL)  {
+            for (i = 0; i < rank; i++) {
+                dimsP[i] = sa[i];
+            }
+            free(sa);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,0);
+        }
+        if (maxdimsP != NULL) {
+            for (i = 0; i < rank; i++) {
+                maxdimsP[i] = msa[i];
+            }
+            free(msa);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims,maxdimsP,0);
+        }
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_simple_extent_type
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1type(
+            JNIEnv *env, jclass clss, jint space_id) {
+        H5S_class_t retVal = H5S_NO_CLASS;
+        
+        if (space_id < 0)
+            h5libraryError(env);
+        retVal = H5Sget_simple_extent_type(space_id);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sset_extent_simple
+     * Signature: (II[J[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sset_1extent_1simple(
+            JNIEnv *env, jclass clss, jint space_id, jint rank, jlongArray dims,
+            jlongArray maxdims) {
+        herr_t status;
+        jlong *dimsP, *maxdimsP;
+        jboolean isCopy;
+        hsize_t *sa;
+        hsize_t *msa;
+        int i;
+        int drank, mrank;
+        hsize_t *lp;
+        jlong *jlp;
+
+        if (dims == NULL) {
+            h5nullArgument(env, "H5Sset_extent_simple:  dims is NULL");
+            return -1;
+        }
+        drank = (int) ENVPTR->GetArrayLength(ENVPAR dims);
+        if (drank != rank) {
+            h5badArgument(env, "H5Sset_extent_simple:  dims rank is invalid");
+            return -1;
+        }
+        if(maxdims != NULL) {
+            mrank = (int) ENVPTR->GetArrayLength(ENVPAR maxdims);
+            if (mrank != rank) {
+                h5badArgument(env, "H5Sset_extent_simple:  maxdims rank is invalid");
+                return -1;
+            }
+        }
+        dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+        if (dimsP == NULL) {
+            h5JNIFatalError(env, "H5Sset_extent_simple:  dims not pinned");
+            return -1;
+        }
+        sa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (sa == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+            h5JNIFatalError(env,"H5Sset_extent_simple:  dims not converted to hsize_t");
+            return -1;
+        }
+        jlp = (jlong *)dimsP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+        if (maxdims == NULL) {
+            maxdimsP = NULL;
+            msa = (hsize_t *)maxdimsP;
+        } 
+        else {
+            maxdimsP = ENVPTR->GetLongArrayElements(ENVPAR maxdims,&isCopy);
+            if (maxdimsP == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                h5JNIFatalError(env,  "H5Sset_extent_simple:  maxdims not pinned");
+                return -1;
+            }
+            msa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+            if (msa == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims,maxdimsP,JNI_ABORT);
+                free (sa);
+                h5JNIFatalError(env,  "H5Sset_extent_simple:  maxdims not converted to hsize_t");
+                return -1;
+            }
+            jlp = (jlong *)maxdimsP;
+            for (i = 0; i < rank; i++) {
+                *lp = (hsize_t)*jlp;
+                lp++;
+                jlp++;
+            }
+        }
+
+        status = H5Sset_extent_simple(space_id, rank, (hsize_t *)sa, (hsize_t *)msa);
+
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        free (sa);
+        if (maxdimsP != NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims,maxdimsP,JNI_ABORT);
+            free (msa);
+        }
+
+        if (status < 0)
+            h5libraryError(env);
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sis_simple
+     * Signature: (I)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sis_1simple(JNIEnv *env,
+            jclass clss, jint space_id) {
+        htri_t bval;
+        bval = H5Sis_simple(space_id);
+        if (bval > 0) {
+            return JNI_TRUE;
+        }
+        else if (bval == 0) {
+            return JNI_FALSE;
+        }
+        else {
+            h5libraryError(env);
+            return JNI_FALSE;
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Soffset_simple
+     * Signature: (I[B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Soffset_1simple(JNIEnv *env,
+            jclass clss, jint space_id, jbyteArray offset) {
+        herr_t status;
+        jbyte *P = NULL;
+        jboolean isCopy;
+        hssize_t *sa;
+        int rank;
+        int i;
+        hssize_t *lp;
+        jlong *jlp;
+
+        if (offset != NULL) {
+            P = ENVPTR->GetByteArrayElements(ENVPAR offset, &isCopy);
+            if (P == NULL) {
+                h5JNIFatalError(env, "H5Soffset_simple:  offset not pinned");
+                return -1;
+            }
+            i = (int) ENVPTR->GetArrayLength(ENVPAR offset);
+            rank = i / sizeof(jlong);
+            sa = lp = (hssize_t *)malloc(rank * sizeof(hssize_t));
+            if (sa == NULL) {
+                ENVPTR->ReleaseByteArrayElements(ENVPAR offset,P,JNI_ABORT);
+                h5JNIFatalError(env,"H5Soffset_simple:  offset not converted to hssize_t");
+                return -1;
+            }
+            jlp = (jlong *)P;
+            for (i = 0; i < rank; i++) {
+                *lp = (hssize_t)*jlp;
+                lp++;
+                jlp++;
+            }
+        }
+        else {
+            P = NULL;
+            sa = (hssize_t *)P;
+        }
+
+        status = H5Soffset_simple(space_id, sa);
+        if (P != NULL) {
+            ENVPTR->ReleaseByteArrayElements(ENVPAR offset,P,JNI_ABORT);
+            free(sa);
+        }
+
+        if (status < 0) 
+            h5libraryError(env);
+ 
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sextent_copy
+     * Signature: (II)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sextent_1copy(JNIEnv *env,
+            jclass clss, jint space_id, jint src_id) {
+        herr_t retVal = -1;
+        retVal = H5Sextent_copy(space_id, src_id);
+        if (retVal < 0)
+            h5libraryError(env);
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sextent_equal
+     * Signature: (II)Z
+     */
+    JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sextent_1equal
+      (JNIEnv *env, jclass clss, jint space_id, jint src_id) {
+        htri_t bval;
+        bval = H5Sextent_equal(space_id, src_id);
+        if (bval > 0) {
+            return JNI_TRUE;
+        }
+        else if (bval == 0) {
+            return JNI_FALSE;
+        }
+        else {
+            h5libraryError(env);
+            return JNI_FALSE;
+        }
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sset_extent_none
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sset_1extent_1none(
+            JNIEnv *env, jclass clss, jint space_id) {
+        herr_t retVal = -1;
+        retVal = H5Sset_extent_none(space_id);
+        if (retVal < 0)
+            h5libraryError(env);
+ 
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sselect_hyperslab
+     * Signature: (II[J[J[J[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1hyperslab(
+            JNIEnv *env, jclass clss, jint space_id, jint op, jlongArray start,
+            jlongArray stride, jlongArray count, jlongArray block) {
+        herr_t status;
+        jlong *startP, *strideP, *countP, *blockP;
+        jboolean isCopy;
+        hsize_t *strt;
+        hsize_t *strd;
+        hsize_t *cnt;
+        hsize_t *blk;
+        int rank;
+        int i;
+        hsize_t *lp;
+        jlong *jlp;
+
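+        /* Strategy: pin start/count (required) and stride/block (optional),
+         * widen each jlong array into a malloc'd hsize_t array, call
+         * H5Sselect_hyperslab, then release every pin with JNI_ABORT since
+         * the inputs are read-only. */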
+        if (start == NULL) {
+            h5nullArgument(env, "H5Sselect_hyperslab:  start is NULL");
+            return -1;
+        }
+        if (count == NULL) {
+            h5nullArgument(env, "H5Sselect_hyperslab:  count is NULL");
+            return -1;
+        }
+        
+        rank = (int) ENVPTR->GetArrayLength(ENVPAR start);
+        if (rank != ENVPTR->GetArrayLength(ENVPAR count)) {
+            h5badArgument(env,
+                    "H5Sselect_hyperslab:  count and start have different rank!");
+            return -1;
+        }
+
+        startP = ENVPTR->GetLongArrayElements(ENVPAR start, &isCopy);
+        if (startP == NULL) {
+            h5JNIFatalError(env, "H5Sselect_hyperslab:  start not pinned");
+            return -1;
+        }
+        strt = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (strt == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start,startP,JNI_ABORT);
+            h5JNIFatalError(env,"H5Sselect_hyperslab:  start not converted to hsize_t");
+            return -1;
+        }
+        
+        jlp = (jlong *)startP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+        
+        countP = ENVPTR->GetLongArrayElements(ENVPAR count,&isCopy);
+        if (countP == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP,JNI_ABORT);
+            free(strt);
+            h5JNIFatalError(env,  "H5Sselect_hyperslab:  count not pinned");
+            return -1;
+        }
+        cnt = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (cnt == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP,JNI_ABORT);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP,JNI_ABORT);
+            free(strt);
+            h5JNIFatalError(env,  "H5Sselect_hyperslab:  count not converted to hsize_t");
+            return -1;
+        }
+        
+        jlp = (jlong *)countP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+        if (stride == NULL) {
+            strideP = NULL;
+            strd = (hsize_t *)strideP;
+        } 
+        else {
+            strideP = ENVPTR->GetLongArrayElements(ENVPAR stride,&isCopy);
+            if (strideP == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP,JNI_ABORT);
+                free(cnt); free(strt);
+                h5JNIFatalError(env, "H5Sselect_hyperslab:  stride not pinned");
+                return -1;
+            }
+            strd = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+            if (strd == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP,JNI_ABORT);
+                free(cnt); free(strt);
+                h5JNIFatalError(env,  "H5Sselect_hyperslab:  stride not converted to hsize_t");
+                return -1;
+            }
+            jlp = (jlong *)strideP;
+            for (i = 0; i < rank; i++) {
+                *lp = (hsize_t)*jlp;
+                lp++;
+                jlp++;
+            }
+        }
+        if (block == NULL) {
+            blockP = NULL;
+            blk = (hsize_t *)blockP;
+        } 
+        else {
+            blockP = ENVPTR->GetLongArrayElements(ENVPAR block,&isCopy);
+            if (blockP == NULL)  {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP,JNI_ABORT);
+                free(cnt); free(strt);
+                if (strd != NULL) { free(strd); }
+                h5JNIFatalError(env,  "H5Sselect_hyperslab:  block not pinned");
+                return -1;
+            }
+            blk = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+            if (blk == NULL) {
+                ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP,JNI_ABORT);
+                ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP,JNI_ABORT);
+                free(cnt); free(strt);
+                if (strd != NULL) { free(strd); }
+                h5JNIFatalError(env,  "H5Sselect_hyperslab:  block not converted to hsize_t");
+                return -1;
+            }
+            jlp = (jlong *)blockP;
+            for (i = 0; i < rank; i++) {
+                *lp = (hsize_t)*jlp;
+                lp++;
+                jlp++;
+            }
+        }
+
+        status = H5Sselect_hyperslab (space_id, (H5S_seloper_t)op, (const hsize_t *)strt, (const hsize_t *)strd, (const hsize_t *)cnt, (const hsize_t *)blk);
+
+        ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+        free(strt);
+        free(cnt);
+        if (strideP != NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+            free(strd);
+        }
+        if (blockP != NULL)  {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP, JNI_ABORT);
+            free(blk);
+        }
+
+        if (status < 0)
+            h5libraryError(env);
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sclose
+     * Signature: (I)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Sclose(JNIEnv *env,
+            jclass clss, jint space_id) {
+        herr_t retVal = -1;
+
+        retVal = H5Sclose(space_id);
+
+        if (retVal < 0) {
+            h5libraryError(env);
+        }
+
+        return (jint) retVal;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_hyper_nblocks
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1nblocks(
+            JNIEnv *env, jclass clss, jint spaceid) {
+        hssize_t status;
+
+        status = H5Sget_select_hyper_nblocks((hid_t) spaceid);
+        if (status < 0)
+            h5libraryError(env);
+
+        return (jlong) status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_elem_npoints
+     * Signature: (I)J
+     */
+    JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1elem_1npoints(
+            JNIEnv *env, jclass clss, jint spaceid) {
+        hssize_t status;
+
+        status = H5Sget_select_elem_npoints((hid_t) spaceid);
+        if (status < 0)
+            h5libraryError(env);
+
+        return (jlong) status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_hyper_blocklist
+     * Signature: (IJJ[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1blocklist(
+            JNIEnv *env, jclass clss, jint spaceid, jlong startblock,
+            jlong numblocks, jlongArray buf) {
+        herr_t status;
+        jlong *bufP;
+        jboolean isCopy;
+        hsize_t *ba;
+        int i;
+        int rank;
+        long st;
+        long nb;
+
+        st = (long) startblock;
+        nb = (long) numblocks;
+
+        if (buf == NULL) {
+            h5nullArgument(env, "H5Sget_select_hyper_blocklist:  buf is NULL");
+            return -1;
+        }
+        rank = H5Sget_simple_extent_ndims(spaceid);
+        if(rank <= 0) rank = 1;
+        if (ENVPTR->GetArrayLength(ENVPAR buf) < (numblocks * rank)) {
+            h5badArgument(env, "H5Sget_select_hyper_blocklist:  buf input array too small");
+            return -1;
+        }
+        bufP = ENVPTR->GetLongArrayElements(ENVPAR buf, &isCopy);
+        if (bufP == NULL) {
+            h5JNIFatalError( env, "H5Sget_select_hyper_blocklist:  buf not pinned");
+            return -1;
+        }
+        ba = (hsize_t *)malloc( nb * 2 * (long)rank * sizeof(hsize_t));
+        if (ba == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP,JNI_ABORT);
+            h5JNIFatalError(env,  "H5Sget_select_hyper_blocklist:  buf not converted to hsize_t");
+            return -1;
+        }
+
+        status = H5Sget_select_hyper_blocklist((hid_t)spaceid, (hsize_t)st,
+                (hsize_t)nb, (hsize_t *)ba);
+
+        if (status < 0) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+            free (ba);
+            h5libraryError(env);
+            return -1;
+        } 
+
+        for (i = 0; i < (numblocks*2*rank); i++) {
+            bufP[i] = ba[i];
+        }
+        free (ba);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, 0);
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_elem_pointlist
+     * Signature: (IJJ[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1elem_1pointlist(
+            JNIEnv *env, jclass clss, jint spaceid, jlong startpoint,
+            jlong numpoints, jlongArray buf) {
+        herr_t status;
+        jlong *bufP;
+        jboolean isCopy;
+        hsize_t *ba;
+        int i;
+        int rank;
+
+        if (buf == NULL) {
+            h5nullArgument(env, "H5Sget_select_elem_pointlist:  buf is NULL");
+            return -1;
+        }
+        rank = H5Sget_simple_extent_ndims(spaceid);
+        if(rank <= 0) rank = 1;
+        if (ENVPTR->GetArrayLength(ENVPAR buf) < (numpoints * rank)) {
+            h5badArgument(env, "H5Sget_select_elem_pointlist:  buf input array too small");
+            return -1;
+        }
+        bufP = ENVPTR->GetLongArrayElements(ENVPAR buf, &isCopy);
+        if (bufP == NULL) {
+            h5JNIFatalError( env, "H5Sget_select_elem_pointlist:  buf not pinned");
+            return -1;
+        }
+        ba = (hsize_t *)malloc( ((long)numpoints * (long)rank) * sizeof(hsize_t));
+        if (ba == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR buf,bufP,JNI_ABORT);
+            h5JNIFatalError(env,"H5Sget_select_elem_pointlist:  buf not converted to hsize_t");
+            return -1;
+        }
+
+        status = H5Sget_select_elem_pointlist((hid_t)spaceid, (hsize_t)startpoint,
+                (hsize_t)numpoints, (hsize_t *)ba);
+
+        if (status < 0) {
+            free (ba);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR buf,bufP,JNI_ABORT);
+            h5libraryError(env);
+            return -1;
+        } 
+
+        for (i = 0; i < (numpoints*rank); i++) {
+            bufP[i] = ba[i];
+        }
+        free (ba) ;
+        ENVPTR->ReleaseLongArrayElements(ENVPAR buf,bufP,0);
+
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sget_select_bounds
+     * Signature: (I[J[J)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1bounds(
+            JNIEnv *env, jclass clss, jint spaceid, jlongArray start,
+            jlongArray end) {
+        herr_t status;
+        jlong *startP, *endP;
+        jboolean isCopy;
+        hsize_t *strt;
+        hsize_t *en;
+        int rank;
+        int i;
+
+        if (start == NULL) {
+            h5nullArgument(env, "H5Sget_select_bounds:  start is NULL");
+            return -1;
+        }
+
+        if (end == NULL) {
+            h5nullArgument(env, "H5Sget_select_bounds:  end is NULL");
+            return -1;
+        }
+
+        startP = ENVPTR->GetLongArrayElements(ENVPAR start, &isCopy);
+        if (startP == NULL) {
+            h5JNIFatalError( env, "H5Sget_select_bounds:  start not pinned");
+            return -1;
+        }
+        rank = (int)ENVPTR->GetArrayLength(ENVPAR start);
+        strt = (hsize_t *)malloc( rank * sizeof(hsize_t));
+        if (strt == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start,startP,JNI_ABORT);
+            h5JNIFatalError(env,"H5Sget_select_bounds:  start not converted to hsize_t");
+            return -1;
+        }
+
+        endP = ENVPTR->GetLongArrayElements(ENVPAR end,&isCopy);
+        if (endP == NULL) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start,startP,JNI_ABORT);
+            free(strt);
+            h5JNIFatalError( env, "H5Sget_select_bounds:  end not pinned");
+            return -1;
+        }
+        en = (hsize_t *)malloc( rank * sizeof(hsize_t));
+        if (en == NULL)  {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR end,endP,JNI_ABORT);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start,startP,JNI_ABORT);
+            free(strt);
+            h5JNIFatalError(env,  "H5Sget_select_bounds:  end not converted to hsize_t");
+            return -1;
+        }
+
+        status = H5Sget_select_bounds((hid_t) spaceid, (hsize_t *)strt, (hsize_t *)en);
+
+        if (status < 0) {
+            ENVPTR->ReleaseLongArrayElements(ENVPAR start,startP,JNI_ABORT);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR end,endP,JNI_ABORT);
+            free(strt); 
+            free(en);
+            h5libraryError(env);
+            return -1;
+        } 
+
+        for (i = 0; i < rank; i++) {
+            startP[i] = strt[i];
+            endP[i] = en[i];
+        }
+        ENVPTR->ReleaseLongArrayElements(ENVPAR start,startP,0);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR end,endP,0);
+        free(strt); 
+        free(en);
+ 
+        return (jint)status;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sencode
+     * Signature: (I)[B
+     */
+    JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sencode
+      (JNIEnv *env, jclass cls, jint obj_id)
+    {
+        herr_t status = -1;
+        unsigned char *bufPtr;
+        size_t buf_size = 0;
+        jbyteArray returnedArray = NULL;
+
+        if (obj_id < 0) {
+            h5badArgument(env, "H5Sencode: invalid argument");
+            return NULL;
+        }
+        
+        status = H5Sencode(obj_id, NULL, &buf_size);
+
+        if (status < 0) {
+            h5libraryError(env);
+            return NULL;
+        }
+
+        if (buf_size == 0) {
+            h5badArgument( env, "H5Sencode:  buf_size == 0");
+            return NULL;
+        }
+
+        bufPtr = (unsigned char*)calloc((size_t)1, buf_size);
+        if (bufPtr == NULL) {
+            h5outOfMemory( env, "H5Sencode:  calloc failed");
+            return NULL;
+        }
+
+        status = H5Sencode((hid_t)obj_id, bufPtr, &buf_size);
+
+        if (status < 0) {
+            free(bufPtr);
+            h5libraryError(env);
+            return NULL;
+        }
+
+        returnedArray = ENVPTR->NewByteArray(ENVPAR buf_size);
+        ENVPTR->SetByteArrayRegion(ENVPAR returnedArray, 0, buf_size, (jbyte *)bufPtr);
+
+        free(bufPtr);
+
+        return returnedArray;
+    }
+
+    /*
+     * Class:     ncsa_hdf_hdf5lib_H5
+     * Method:    H5Sdecode
+     * Signature: ([B)I
+     */
+    JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sdecode
+      (JNIEnv *env, jclass cls, jbyteArray buf)
+    {
+        hid_t sid = -1;
+        jbyte *bufP;
+        jboolean isCopy;
+
+        if (buf == NULL) {
+            h5nullArgument(env, "H5Sdecode:  buf is NULL");
+            return -1;
+        }
+        bufP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+        if (bufP == NULL) {
+            h5JNIFatalError( env, "H5Sdecode:  buf not pinned");
+            return -1;
+        }
+        sid = H5Sdecode(bufP);
+
+        if (sid < 0) {
+            ENVPTR->ReleaseByteArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+            h5libraryError(env);
+            return -1;
+        }
+        ENVPTR->ReleaseByteArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+
+        return (jint)sid;
+    }
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5sImp.h b/source/c/hdf-java/h5sImp.h
new file mode 100755
index 0000000..a1d1545
--- /dev/null
+++ b/source/c/hdf-java/h5sImp.h
@@ -0,0 +1,239 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5S */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5S
+#define _Included_ncsa_hdf_hdf5lib_H5_H5S
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Sclose
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Scopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Scopy
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Screate
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Screate
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Screate_simple
+ * Signature: (I[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Screate_1simple
+  (JNIEnv *, jclass, jint, jlongArray, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sdecode
+ * Signature: ([B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sdecode
+  (JNIEnv *env, jclass cls, jbyteArray buf);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sencode
+ * Signature: (I)[B
+ */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sencode
+  (JNIEnv *env, jclass cls, jint obj_id);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sextent_copy
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sextent_1copy
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sextent_equal
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sextent_1equal
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_bounds
+ * Signature: (I[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1bounds
+  (JNIEnv *, jclass, jint, jlongArray, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_elem_npoints
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1elem_1npoints
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_elem_pointlist
+ * Signature: (IJJ[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1elem_1pointlist
+  (JNIEnv *, jclass, jint, jlong, jlong, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_hyper_blocklist
+ * Signature: (IJJ[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1blocklist
+  (JNIEnv *, jclass, jint, jlong, jlong, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_hyper_nblocks
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1nblocks
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_npoints
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1npoints
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1select_1type
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_dims
+ * Signature: (I[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1dims
+  (JNIEnv *, jclass, jint, jlongArray, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_ndims
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1ndims
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_npoints
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1npoints
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1type
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sis_simple
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sis_1simple
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Soffset_simple
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Soffset_1simple
+  (JNIEnv *, jclass, jint, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_all
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1all
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_elements
+ * Signature: (III[B)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1elements
+  (JNIEnv *, jclass, jint, jint, jint, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_hyperslab
+ * Signature: (II[J[J[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1hyperslab
+  (JNIEnv *, jclass, jint, jint, jlongArray, jlongArray, jlongArray, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_none
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1none
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_valid
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sselect_1valid
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sset_extent_none
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sset_1extent_1none
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sset_extent_simple
+ * Signature: (II[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Sset_1extent_1simple
+  (JNIEnv *, jclass, jint, jint, jlongArray, jlongArray);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/source/c/hdf-java/h5tImp.c b/source/c/hdf-java/h5tImp.c
new file mode 100755
index 0000000..aa4392a
--- /dev/null
+++ b/source/c/hdf-java/h5tImp.c
@@ -0,0 +1,1838 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Datatype Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdfgroup.org/HDF5/doc/
+ *
+ */
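+
+/*
+ *  Every wrapper below follows the same shape: validate the Java arguments,
+ *  pin them to C memory, call the HDF5 entry point, release the pinned
+ *  memory, and turn a negative return code into a Java exception.  As a
+ *  minimal illustrative sketch (the method name here is hypothetical):
+ *
+ *    JNIEXPORT jint JNICALL Java_..._example
+ *      (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+ *    {
+ *        char *cname = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, NULL);
+ *        hid_t id = H5Topen2((hid_t)loc_id, cname, H5P_DEFAULT);
+ *        ENVPTR->ReleaseStringUTFChars(ENVPAR name, cname);
+ *        if (id < 0)
+ *            h5libraryError(env);
+ *        return (jint)id;
+ *    }
+ */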
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5tImp.h"
+//#include "h5util.h"
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Topen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Topen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    hid_t status;
+    char* tname;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Topen:  name is NULL");
+        return -1;
+    }
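+    /* ENVPTR/ENVPAR come from h5jni.h and hide the difference between the
+       C ((*env)->f(env, ...)) and C++ (env->f(...)) JNI calling conventions */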
+    tname = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Topen:  name not pinned");
+        return -1;
+    }
+    status = H5Topen2(loc_id, tname, (hid_t)H5P_DEFAULT);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,tname);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommit1
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tcommit1
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type)
+{
+    herr_t status;
+    char* tname;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tcommit:  name is NULL");
+        return -1;
+    }
+    tname = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tcommit:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tcommit2(loc_id, tname, type, (hid_t)H5P_DEFAULT, (hid_t)H5P_DEFAULT, (hid_t)H5P_DEFAULT);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,tname);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommitted
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tcommitted
+  (JNIEnv *env, jclass clss, jint type)
+{
+    htri_t bval;
+    bval = H5Tcommitted(type);
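+    /* htri_t is tri-state: positive means TRUE, zero means FALSE, and a
+       negative value indicates failure, which is mapped to a Java exception */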
+    if (bval > 0) {
+        return JNI_TRUE;
+    }
+    else if (bval == 0) {
+        return JNI_FALSE;
+    }
+    else {
+        /* raise exception -- return value is irrelevant */
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tcreate
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tcreate
+  (JNIEnv *env, jclass clss, jint dclass, jlong size)
+{
+    hid_t retVal = -1;
+    retVal =  H5Tcreate((H5T_class_t )dclass, (size_t)size );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tcopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tcopy
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Tcopy(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tequal
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tequal
+  (JNIEnv *env, jclass clss, jint type_id1, jint type_id2)
+{
+    htri_t bval;
+    bval = H5Tequal(type_id1, type_id2 );
+    if (bval > 0) {
+        return JNI_TRUE;
+    }
+    else if (bval == 0) {
+        return JNI_FALSE;
+    }
+    else {
+        /* raise exception -- return value is irrelevant */
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tlock
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tlock
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tlock(type_id );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_class
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1class
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_class_t retVal = H5T_NO_CLASS;
+    retVal =  H5Tget_class(type_id );
+    if (retVal == H5T_NO_CLASS) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_size
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1size
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_size(type_id );
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_size_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1size_1long
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_size(type_id );
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_size
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1size
+  (JNIEnv *env, jclass clss, jint type_id, jlong size)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_size(type_id, (size_t)size );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_order
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1order
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_order_t retVal = H5T_ORDER_ERROR;
+    retVal =  H5Tget_order(type_id );
+    if (retVal == H5T_ORDER_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_order
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1order
+  (JNIEnv *env, jclass clss, jint type_id, jint order)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_order(type_id, (H5T_order_t)order);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_precision
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1precision
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_precision(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_precision_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1precision_1long
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_precision(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_precision
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1precision
+  (JNIEnv *env, jclass clss, jint type_id, jlong precision)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_precision(type_id, (size_t)precision);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_offset
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1offset
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    int retVal = 0;
+    retVal =  H5Tget_offset(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_offset
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1offset
+  (JNIEnv *env, jclass clss, jint type_id, jlong offset)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_offset(type_id, (size_t)offset);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_pad
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1pad
+  (JNIEnv *env, jclass clss, jint type_id, jintArray pad)
+{
+    herr_t status;
+    jboolean isCopy;
+    jint *P;
+
+    if (pad == NULL) {
+        h5nullArgument( env, "H5Tget_pad:  pad is NULL");
+        return -1;
+    }
+    P = ENVPTR->GetIntArrayElements(ENVPAR pad,&isCopy);
+    if (P == NULL) {
+        h5JNIFatalError(env,  "H5Tget_pad:  pad not pinned");
+        return -1;
+    }
+    status = H5Tget_pad(type_id, (H5T_pad_t *)&(P[0]), (H5T_pad_t *)&(P[1]));
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR pad,P,JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
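+    /* releasing with mode 0 copies the elements back to the Java array;
+       the JNI_ABORT above discards them when the call fails */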
+    ENVPTR->ReleaseIntArrayElements(ENVPAR pad,P,0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_pad
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1pad
+  (JNIEnv *env, jclass clss, jint type_id, jint lsb, jint msb)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_pad(type_id, (H5T_pad_t)lsb, (H5T_pad_t)msb);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_sign
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1sign
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_sign_t retVal = H5T_SGN_ERROR;
+    retVal =  H5Tget_sign(type_id);
+    if (retVal == H5T_SGN_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_sign
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1sign
+  (JNIEnv *env, jclass clss, jint type_id, jint sign)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_sign(type_id, (H5T_sign_t)sign);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_fields_int
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1fields_1int
+  (JNIEnv *env, jclass clss, jint type_id, jintArray fields)
+{
+    herr_t status;
+    jboolean isCopy;
+    jint *P;
+
+    if (fields == NULL) {
+        h5nullArgument( env, "H5Tget_fields:  fields is NULL");
+        return -1;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR fields) < 5) {
+        h5badArgument( env, "H5Tget_fields:  fields input array < order 5");
+        return -1;
+    }
+    P = ENVPTR->GetIntArrayElements(ENVPAR fields,&isCopy);
+    if (P == NULL) {
+        h5JNIFatalError(env,  "H5Tget_fields:  fields not pinned");
+        return -1;
+    }
+
+    status = H5Tget_fields(type_id, (size_t *)&(P[0]), (size_t *)&(P[1]), (size_t *)&(P[2]), (size_t *)&(P[3]), (size_t *)&(P[4]));
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR fields,P,JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    ENVPTR->ReleaseIntArrayElements(ENVPAR fields,P,0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_fields
+ * Signature: (I[J)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1fields
+  (JNIEnv *env, jclass clss, jint type_id, jlongArray fields)
+{
+    herr_t status;
+    jboolean isCopy;
+    jlong *fieldsArray;
+
+    if (fields == NULL) {
+        h5nullArgument( env, "H5Tget_fields:  fields is NULL");
+        return;
+    }
+    if (ENVPTR->GetArrayLength(ENVPAR fields) < 5) {
+        h5badArgument( env, "H5Tget_fields:  fields input array < order 5");
+        return;
+    }
+    fieldsArray = ENVPTR->GetLongArrayElements(ENVPAR fields, &isCopy);
+    if (fieldsArray == NULL) {
+        h5JNIFatalError(env,  "H5Tget_fields:  fields not pinned");
+        return;
+    }
+
+    {
+        /* A direct cast of the jlong array to (size_t *) fails on 32-bit
+         * platforms: size_t is 4 bytes there while jlong is always 8 bytes,
+         * so the library would read and write misaligned elements.  Copy
+         * each value through a size_t temporary instead. */
+        size_t spos_t = (size_t)fieldsArray[0];
+        size_t epos_t = (size_t)fieldsArray[1];
+        size_t esize_t = (size_t)fieldsArray[2];
+        size_t mpos_t = (size_t)fieldsArray[3];
+        size_t msize_t = (size_t)fieldsArray[4];
+
+        status = H5Tget_fields(type_id, &spos_t, &epos_t, &esize_t, &mpos_t, &msize_t);
+
+        fieldsArray[0] = (jlong)spos_t;
+        fieldsArray[1] = (jlong)epos_t;
+        fieldsArray[2] = (jlong)esize_t;
+        fieldsArray[3] = (jlong)mpos_t;
+        fieldsArray[4] = (jlong)msize_t;
+    }
+
+    if (status < 0) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR fields, fieldsArray, JNI_ABORT);
+        h5libraryError(env);
+        return;
+    }
+
+    ENVPTR->ReleaseLongArrayElements(ENVPAR fields, fieldsArray, 0);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_fields
+ * Signature: (IJJJJJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1fields
+  (JNIEnv *env, jclass clss, jint type_id, jlong spos, jlong epos,
+  jlong esize, jlong mpos, jlong msize)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_fields(type_id, (size_t)spos, (size_t)epos, (size_t)esize, (size_t)mpos, (size_t)msize);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_ebias
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1ebias
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_ebias(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_ebias_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1ebias_1long
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_ebias(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_ebias
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1ebias
+  (JNIEnv *env, jclass clss, jint type_id, jlong ebias)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_ebias(type_id, (size_t)ebias);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_norm
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1norm
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_norm_t retVal = H5T_NORM_ERROR;
+    retVal =  H5Tget_norm(type_id);
+    if (retVal == H5T_NORM_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_norm
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1norm
+  (JNIEnv *env, jclass clss, jint type_id, jint norm)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_norm(type_id, (H5T_norm_t )norm);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_inpad
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1inpad
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_pad_t retVal = H5T_PAD_ERROR;
+    retVal =  H5Tget_inpad(type_id );
+    if (retVal == H5T_PAD_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_inpad
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1inpad
+  (JNIEnv *env, jclass clss, jint type_id, jint inpad)
+{
+    herr_t retVal = -1;
+    retVal = H5Tset_inpad(type_id, (H5T_pad_t) inpad);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_cset
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1cset
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_cset_t retVal = H5T_CSET_ERROR;
+    retVal =  H5Tget_cset(type_id);
+    if (retVal == H5T_CSET_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_cset
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1cset
+  (JNIEnv *env, jclass clss, jint type_id, jint cset)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_cset(type_id, (H5T_cset_t)cset);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_strpad
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1strpad
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_str_t retVal = H5T_STR_ERROR;
+    retVal =  H5Tget_strpad(type_id);
+    if (retVal == H5T_STR_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_strpad
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1strpad
+  (JNIEnv *env, jclass clss, jint type_id, jint strpad)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_strpad(type_id, (H5T_str_t)strpad);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_nmembers
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1nmembers
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    int retVal = -1;
+    retVal =  H5Tget_nmembers(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_name
+ * Signature: (II)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1name
+  (JNIEnv *env, jclass clss, jint type_id, jint field_idx)
+{
+    char *name;
+    jstring str;
+
+    name = H5Tget_member_name(type_id, field_idx);
+
+    if (name == NULL) {
+        return NULL;
+    }
+
+    /* may throw OutOfMemoryError */
+    str = ENVPTR->NewStringUTF(ENVPAR name);
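+    /* the name buffer was allocated by the HDF5 library; release it with
+       the library's allocator (H5free_memory, HDF5 1.8.13+), not plain free() */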
+    H5free_memory(name);
+
+    if (str == NULL)  {
+        h5JNIFatalError(env,  "H5Tget_member_name:  returned string not created");
+        return NULL;
+    }
+
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_index
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1index
+  (JNIEnv *env, jclass clss, jint type_id, jstring field_name)
+{
+    char *tname;
+    int index;
+    jboolean isCopy;
+
+    if (field_name == NULL) {
+        h5nullArgument( env, "H5Tget_member_index:  field_name is NULL");
+        return -1;
+    }
+    tname = (char *)ENVPTR->GetStringUTFChars(ENVPAR field_name,&isCopy);
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tget_member_index:  field_name not pinned");
+        return -1;
+    }
+
+    index = H5Tget_member_index(type_id, tname);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR field_name,tname);
+
+    if (index < 0) {
+        h5libraryError(env);
+    }
+
+    return index;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_type
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tget_1member_1type
+  (JNIEnv *env, jclass clss, jint type_id, jint field_idx)
+{
+    hid_t retVal = -1;
+    retVal =  H5Tget_member_type(type_id, field_idx);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_offset
+ * Signature: (II)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1offset
+  (JNIEnv *env, jclass clss, jint type_id, jint memno)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_member_offset((hid_t)type_id, memno);
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_class
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1class
+  (JNIEnv *env, jclass clss, jint type_id, jint memno)
+{
+    int retVal = 0;
+    retVal =  H5Tget_member_class((hid_t)type_id, memno);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tinsert
+ * Signature: (ILjava/lang/String;JI)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tinsert
+  (JNIEnv *env, jclass clss, jint type_id, jstring name, jlong offset, jint field_id)
+{
+    herr_t status;
+    char* tname;
+    jboolean isCopy;
+    size_t off;
+
+    off = (size_t)offset;
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tinsert:  name is NULL");
+        return -1;
+    }
+    tname =(char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tinsert:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tinsert(type_id, tname, (size_t)off, field_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,tname);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tpack
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tpack
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tpack(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tclose
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    herr_t retVal = 0;
+
+    retVal =  H5Tclose(type_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tvlen_create
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tvlen_1create
+  (JNIEnv *env, jclass clss, jint base_id)
+{
+    hid_t status;
+
+    status = H5Tvlen_create((hid_t)base_id);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_tag
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1tag
+  (JNIEnv *env, jclass clss, jint type, jstring tag)
+{
+    herr_t status;
+    char *tagP;
+    jboolean isCopy;
+
+    if (tag == NULL) {
+        h5nullArgument( env, "H5Tset_tag:  tag is NULL");
+        return -1;
+    }
+
+    tagP = (char *)ENVPTR->GetStringUTFChars(ENVPAR tag,&isCopy);
+    if (tagP == NULL) {
+        h5JNIFatalError( env, "H5Tset_tag:  tag not pinned");
+        return -1;
+    }
+
+    status = H5Tset_tag((hid_t)type, tagP);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR tag,tagP);
+
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_tag
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1tag
+  (JNIEnv *env, jclass clss, jint type)
+{
+    jstring str;
+    char *tag;
+
+    tag = H5Tget_tag((hid_t)type);
+
+    if (tag == NULL)
+        return NULL;
+
+    /* may throw OutOfMemoryError */
+    str = ENVPTR->NewStringUTF(ENVPAR tag);
+    H5free_memory(tag);
+
+    if (str == NULL)  {
+        h5JNIFatalError(env,  "H5Tget_tag:  returned string not created");
+        return NULL;
+    }
+
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tget_super
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tget_1super
+  (JNIEnv *env, jclass clss, jint type)
+{
+    hid_t status;
+
+    status = H5Tget_super((hid_t)type);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tenum_create
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tenum_1create
+  (JNIEnv *env, jclass clss, jint base_id)
+{
+    hid_t status;
+
+    status = H5Tenum_create((hid_t)base_id);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert_int
+ * Signature: (ILjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1insert_1int
+  (JNIEnv *env, jclass clss, jint type, jstring name, jintArray value)
+{
+    herr_t status;
+    jint *intP;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_insert:  name is NULL");
+        return -1;
+    }
+
+    nameP = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_insert:  name not pinned");
+        return -1;
+    }
+
+    if ( value == NULL ) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5nullArgument( env, "H5Tenum_insert:  value is NULL");
+        return -1;
+    }
+
+    intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy);
+    if (intP == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5JNIFatalError( env, "H5Tenum_insert:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_insert((hid_t)type, nameP, intP);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert
+ * Signature: (ILjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1insert
+  (JNIEnv *env, jclass clss, jint type, jstring name, jbyteArray value)
+{
+    herr_t status;
+    jbyte *byteP;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_insert:  name is NULL");
+        return;
+    }
+
+    nameP = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_insert:  name not pinned");
+        return;
+    }
+
+    if ( value == NULL ) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5nullArgument( env, "H5Tenum_insert:  value is NULL");
+        return;
+    }
+
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+    if (byteP == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5JNIFatalError( env, "H5Tenum_insert:  value not pinned");
+        return;
+    }
+
+    status = H5Tenum_insert((hid_t)type, nameP, byteP);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_nameof_int
+ * Signature: (I[I[Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1nameof_1int
+  (JNIEnv *env, jclass clss, jint type, jintArray value, jobjectArray name,
+          jint size)
+{
+    hid_t status;
+    jint *intP;
+    char *nameP;
+    jboolean isCopy;
+    jstring str;
+
+    if (size <= 0) {
+        h5badArgument( env, "H5Tenum_nameof:  name size < 0");
+        return -1;
+    }
+
+    nameP = (char *)malloc(sizeof(char)*size);
+    if (nameP == NULL) {
+        /* exception -- out of memory */
+        h5outOfMemory( env, "H5Tenum_nameof:  malloc name size");
+        return -1;
+    }
+
+    if ( value == NULL ) {
+        free(nameP);
+        h5nullArgument( env, "H5Tenum_nameof:  value is NULL");
+        return -1;
+    }
+
+    intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy);
+    if (intP == NULL) {
+        free(nameP);
+        h5JNIFatalError( env, "H5Tenum_nameof:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_nameof((hid_t)type, intP, nameP, (size_t)size);
+
+    ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+
+    if (status < 0) {
+        free(nameP);
+        h5libraryError(env);
+        return -1;
+    }
+    str = ENVPTR->NewStringUTF(ENVPAR nameP);
+    if (str == NULL) {
+        free(nameP);
+        h5JNIFatalError( env, "H5Tenum_nameof:  return array not created");
+        return -1;
+    }
+    /*  SetObjectArrayElement may raise exceptions */
+    ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+
+    free(nameP);
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_nameof
+ * Signature: (I[BJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1nameof
+  (JNIEnv *env, jclass clss, jint type, jbyteArray value, jlong size)
+{
+    hid_t status;
+    jbyte *byteP;
+    char *nameP;
+    jboolean isCopy;
+    jstring str;
+
+    if (size <= 0) {
+        h5badArgument( env, "H5Tenum_nameof:  name size < 0");
+        return NULL;
+    }
+
+    nameP = (char *)malloc(sizeof(char)*(size_t)size);
+    if (nameP == NULL) {
+        /* exception -- out of memory */
+        h5outOfMemory( env, "H5Tenum_nameof:  malloc name size");
+        return NULL;
+    }
+
+    if ( value == NULL ) {
+        free(nameP);
+        h5nullArgument( env, "H5Tenum_nameof:  value is NULL");
+        return NULL;
+    }
+
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+    if (byteP == NULL) {
+        free(nameP);
+        h5JNIFatalError( env, "H5Tenum_nameof:  value not pinned");
+        return NULL;
+    }
+
+    status = H5Tenum_nameof((hid_t)type, byteP, nameP, (size_t)size);
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+
+    if (status < 0) {
+        free(nameP);
+        h5libraryError(env);
+        return NULL;
+    }
+    str = ENVPTR->NewStringUTF(ENVPAR nameP);
+    if (str == NULL) {
+        free(nameP);
+        h5JNIFatalError( env, "H5Tenum_nameof:  return array not created");
+        return NULL;
+    }
+
+    free(nameP);
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_valueof_int
+ * Signature: (ILjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1valueof_1int
+  (JNIEnv *env, jclass clss, jint type, jstring name, jintArray value)
+{
+    hid_t status;
+    jint *intP;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_valueof:  name is NULL");
+        return -1;
+    }
+
+    nameP = (char *)ENVPTR->GetStringUTFChars(ENVPAR name, &isCopy);
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_valueof:  name not pinned");
+        return -1;
+    }
+
+    if ( value == NULL ) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5nullArgument( env, "H5Tenum_valueof:  value is NULL");
+        return -1;
+    }
+
+    intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy);
+    if (intP == NULL)  {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5JNIFatalError( env, "H5Tenum_valueof:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_valueof((hid_t)type, nameP, intP);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, 0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_valueof
+ * Signature: (ILjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1valueof
+  (JNIEnv *env, jclass clss, jint type, jstring name, jbyteArray value)
+{
+    hid_t status;
+    jbyte *byteP;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_valueof:  name is NULL");
+        return;
+    }
+
+    nameP = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_valueof:  name not pinned");
+        return;
+    }
+
+    if (value == NULL) {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+        h5nullArgument( env, "H5Tenum_valueof:  value is NULL");
+        return;
+    }
+
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+    if (byteP == NULL)  {
+        ENVPTR->ReleaseStringUTFChars(ENVPAR name,nameP);
+        h5JNIFatalError( env, "H5Tenum_valueof:  value not pinned");
+        return;
+    }
+
+    status = H5Tenum_valueof((hid_t)type, nameP, byteP);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name, nameP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+        h5libraryError(env);
+        return;
+    }
+    ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, 0);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_value_int
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1value_1int
+  (JNIEnv *env, jclass clss, jint type, jint membno, jintArray value)
+{
+    hid_t status;
+    jint *intP;
+    jboolean isCopy;
+
+    if ( value == NULL ) {
+        h5nullArgument( env, "H5Tget_member_value:  value is NULL");
+        return -1;
+    }
+
+    intP = ENVPTR->GetIntArrayElements(ENVPAR value,&isCopy);
+    if (intP == NULL) {
+        h5JNIFatalError( env, "H5Tget_member_value:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tget_member_value((hid_t)type, (int)membno, intP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR value,intP,JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR value,intP,0);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_value
+ * Signature: (II[B)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1value
+  (JNIEnv *env, jclass clss, jint type, jint membno, jbyteArray value)
+{
+    hid_t status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+    if ( value == NULL ) {
+        h5nullArgument( env, "H5Tget_member_value:  value is NULL");
+        return;
+    }
+
+    byteP = ENVPTR->GetByteArrayElements(ENVPAR value,&isCopy);
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Tget_member_value:  value not pinned");
+        return;
+    }
+
+    status = H5Tget_member_value((hid_t)type, (int)membno, byteP);
+
+    if (status < 0) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR value,byteP,JNI_ABORT);
+        h5libraryError(env);
+        return;
+    }
+    ENVPTR->ReleaseByteArrayElements(ENVPAR value,byteP,0);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tarray_create
+ * Signature: (II[I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tarray_1create
+  (JNIEnv *env, jclass clss, jint base, jint rank, jintArray dims, jintArray perms)
+{
+    hid_t status;
+    jint *dimsP;
+    jint *permP;
+    int dlen;
+    hsize_t *cdims=NULL;
+    jboolean isCopy;
+    int i;
+
+    if (rank <= 0) {
+        h5badArgument( env, "H5Tarray_create:  rank is < 1");
+        return -1;
+    }
+    if ( dims == NULL ) {
+        h5nullArgument( env, "H5Tarray_create:  dims is NULL");
+        return -1;
+    }
+
+    dimsP = ENVPTR->GetIntArrayElements(ENVPAR dims,&isCopy);
+    if (dimsP == NULL) {
+        h5JNIFatalError( env, "H5Tarray_create:  dimsP not pinned");
+        return -1;
+    }
+
+    dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+    if (dlen != rank) {
+        h5JNIFatalError( env, "H5Tarray_create:  dims len != rank");
+        ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        return -1;
+    }
+
+    if (perms == NULL) {
+        permP = NULL;
+    }
+    else {
+        permP = ENVPTR->GetIntArrayElements(ENVPAR perms,&isCopy);
+        if (permP == NULL) {
+            h5JNIFatalError( env, "H5Tarray_create:  permP not pinned");
+            ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+            return -1;
+        }
+    }
+
+    cdims = (hsize_t *)malloc(dlen * sizeof(hsize_t));
+    if (cdims == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        if (permP != NULL) {
+            ENVPTR->ReleaseIntArrayElements(ENVPAR perms,permP,JNI_ABORT);
+        }
+        h5outOfMemory( env, "H5Tarray_create:  malloc dims");
+        return -1;
+    }
+    for (i = 0; i < dlen; i++) {
+        cdims[i] = (hsize_t)dimsP[i];
+    }
+
+    status = H5Tarray_create2((hid_t)base, (int)rank, (const hsize_t *)cdims);
+
+    ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+    if (permP != NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR perms,permP,JNI_ABORT);
+    }
+
+    free (cdims);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_ndims
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1array_1ndims
+  (JNIEnv *env, jclass clss, jint dt)
+{
+    hid_t status;
+
+    status = H5Tget_array_ndims((hid_t)dt);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims
+ * Signature: (I[I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1array_1dims
+  (JNIEnv *env, jclass clss, jint dt, jintArray dims, jintArray perms)
+{
+    hid_t status;
+    jint *dimsP;
+    jint *permP = NULL;     //parameter perm is never used
+    int dlen;
+    int i;
+    hsize_t *cdims=NULL;
+    jboolean isCopy;
+
+    if ( dims == NULL ) {
+        h5nullArgument( env, "H5Tget_array_dims:  value is NULL");
+        return -1;
+    }
+
+    dimsP = ENVPTR->GetIntArrayElements(ENVPAR dims,&isCopy);
+    if (dimsP == NULL) {
+        h5JNIFatalError( env, "H5Tget_array_dims:  dimsP not pinned");
+        return -1;
+    }
+
+    dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+    cdims = (hsize_t *)malloc(dlen * sizeof(hsize_t));
+    if (cdims == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        h5outOfMemory( env, "H5Tget_array_dims:  malloc dims");
+        return -1;
+    }
+
+    status = H5Tget_array_dims2((hid_t)dt, (hsize_t *)cdims);
+
+    if (status < 0) {
+        free(cdims);
+        ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    for (i = 0; i < dlen; i++) {
+        dimsP[i] = (jint) cdims[i];
+    }
+    ENVPTR->ReleaseIntArrayElements(ENVPAR dims,dimsP,0);
+
+    if (cdims) free(cdims);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tis_variable_str
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tis_1variable_1str
+  (JNIEnv *env, jclass clss, jint dtype_id)
+{
+    htri_t bval;
+    bval = H5Tis_variable_str((hid_t)dtype_id);
+    if (bval > 0) {
+        return JNI_TRUE;
+    }
+    else if (bval == 0) {
+        return JNI_FALSE;
+    }
+    else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tget_native_type
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tget_1native_1type
+  (JNIEnv *env, jclass clss, jint dtype_id, jint direction)
+{
+    hid_t native_tid;
+
+    native_tid = H5Tget_native_type((hid_t)dtype_id, (H5T_direction_t)direction);
+
+    if (native_tid < 0){
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jint)native_tid;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tdetect_class
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tdetect_1class
+  (JNIEnv *env, jclass clss, jint dtype_id, jint dtype_class)
+{
+    htri_t bval;
+    bval = H5Tdetect_class((hid_t)dtype_id, (H5T_class_t)dtype_class);
+    if (bval > 0) {
+        return JNI_TRUE;
+    }
+    else if (bval == 0) {
+        return JNI_FALSE;
+    }
+    else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Topen2
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Topen2
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist)
+{
+    hid_t status;
+    char* tname;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Topen2:  name is NULL");
+        return -1;
+    }
+    tname = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Topen2:  name not pinned");
+        return -1;
+    }
+
+    status = H5Topen2(loc_id, tname, (hid_t)access_plist);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,tname);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommit
+ * Signature: (ILjava/lang/String;IIII)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tcommit
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type,
+          jint link_plist_id, jint create_plist_id, jint access_plist_id)
+{
+    herr_t status;
+    char* tname;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tcommit2:  name is NULL");
+        return;
+    }
+    tname = (char *)ENVPTR->GetStringUTFChars(ENVPAR name,&isCopy);
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tcommit2:  name not pinned");
+        return;
+    }
+
+    status = H5Tcommit2(loc_id, tname, type, (hid_t)link_plist_id, (hid_t)create_plist_id, (hid_t)access_plist_id);
+
+    ENVPTR->ReleaseStringUTFChars(ENVPAR name,tname);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tarray_create2
+ * Signature: (II[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tarray_1create2
+  (JNIEnv *env, jclass clss, jint base, jint rank, jlongArray dims)
+{
+    hid_t status;
+    jlong *dimsP;
+    int dlen;
+    hsize_t *cdims=NULL;
+    jboolean isCopy;
+    int i;
+
+    if (rank <= 0) {
+        h5badArgument( env, "H5Tarray_create:  rank is < 1");
+        return -1;
+    }
+    if ( dims == NULL ) {
+        h5nullArgument( env, "H5Tarray_create:  dims is NULL");
+        return -1;
+    }
+
+    dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims,&isCopy);
+    if (dimsP == NULL) {
+        h5JNIFatalError( env, "H5Tarray_create:  dimsP not pinned");
+        return -1;
+    }
+
+    dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+    if (dlen != rank) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        h5JNIFatalError( env, "H5Tarray_create:  dims len != rank");
+        return -1;
+    }
+
+    cdims = (hsize_t *)malloc(dlen * sizeof(hsize_t));
+    if (cdims == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        h5outOfMemory( env, "H5Tarray_create:  malloc dims");
+        return -1;
+    }
+    for (i = 0; i < dlen; i++) {
+        cdims[i] = (hsize_t)dimsP[i];
+    }
+
+    status = H5Tarray_create2((hid_t)base, (int)rank, (const hsize_t *)cdims);
+
+    ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+
+    free (cdims);
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims2
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1array_1dims2
+  (JNIEnv *env, jclass clss, jint dt, jlongArray dims)
+{
+    hid_t status;
+    jlong *dimsP;
+    int dlen;
+    int i;
+    hsize_t *cdims=NULL;
+    jboolean isCopy;
+
+    if ( dims == NULL ) {
+        h5nullArgument( env, "H5Tget_array_dims:  value is NULL");
+        return -1;
+    }
+
+    dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims,&isCopy);
+    if (dimsP == NULL) {
+        h5JNIFatalError( env, "H5Tget_array_dims:  dimsP not pinned");
+        return -1;
+    }
+
+    dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+    cdims = (hsize_t *)malloc(dlen * sizeof(hsize_t));
+    if (cdims == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        h5outOfMemory( env, "H5Tget_array_dims:  malloc dims");
+        return -1;
+    }
+
+    status = H5Tget_array_dims2((hid_t)dt, (hsize_t *)cdims);
+
+    if (status < 0) {
+        if (cdims)
+            free(cdims);
+        ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,JNI_ABORT);
+        h5libraryError(env);
+        return -1;
+    }
+
+    for (i = 0; i < dlen; i++) {
+        dimsP[i] = (jlong) cdims[i];
+    }
+    ENVPTR->ReleaseLongArrayElements(ENVPAR dims,dimsP,0);
+
+    if (cdims)
+        free(cdims);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tconvert
+ * Signature: (IIJ[B[BI)V
+ * H5Tconvert(int src_id, int dst_id, long nelmts, byte[] buf, byte[] background, int plist_id)
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tconvert
+  (JNIEnv *env, jclass clss,
+      jint src_id, jint dst_id, jlong nelmts,
+      jbyteArray buf, jbyteArray background, jint plist_id)
+{
+    hid_t status;
+    jbyte *bufP, *bgP=NULL;
+    jboolean isCopy;
+
+    if (nelmts <= 0) {
+        h5badArgument( env, "H5Tconvert:  name nelmts < 0");
+        return;
+    }
+
+    bufP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+    if (bufP == NULL) {
+        h5JNIFatalError( env, "H5Tconvert:  value not pinned");
+        return;
+    }
+
+    if (background)
+      bgP = ENVPTR->GetByteArrayElements(ENVPAR background, &isCopy);
+
+    status = H5Tconvert( (hid_t) src_id, (hid_t) dst_id, (size_t) nelmts, (void *)bufP, (void *)bgP, (hid_t) plist_id ) ;
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR buf, bufP, 0);
+
+    if (bgP)
+        ENVPTR->ReleaseByteArrayElements(ENVPAR background, bgP, JNI_ABORT);
+}
+
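+/*
+ * H5Tconvert converts the buffer in place, so the caller must size buf for
+ * whichever of the two datatypes is larger.  A minimal C-side usage sketch
+ * (illustrative only):
+ *
+ *   double buf[100];                          // big enough for int or double
+ *   // ... fill ((int *)buf)[0..99] with native ints ...
+ *   H5Tconvert(H5T_NATIVE_INT, H5T_NATIVE_DOUBLE, (size_t)100,
+ *              buf, NULL, H5P_DEFAULT);       // ints become doubles in place
+ */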
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5tImp.h b/source/c/hdf-java/h5tImp.h
new file mode 100755
index 0000000..7405ddf
--- /dev/null
+++ b/source/c/hdf-java/h5tImp.h
@@ -0,0 +1,562 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ncsa_hdf_hdf5lib_H5_H5T */
+
+#ifndef _Included_ncsa_hdf_hdf5lib_H5_H5T
+#define _Included_ncsa_hdf_hdf5lib_H5_H5T
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Topen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Topen
+  (JNIEnv *, jclass, jint, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommit1
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tcommit1
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommitted
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tcommitted
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tcreate
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tcreate
+  (JNIEnv *, jclass, jint, jlong);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tcopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tcopy
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tequal
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tequal
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tlock
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tlock
+  (JNIEnv *env, jclass clss, jint type_id);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_class
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1class
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_size
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1size
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_size_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1size_1long
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_size
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1size
+  (JNIEnv *env, jclass clss, jint type_id, jlong size);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_order
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1order
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_order
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1order
+  (JNIEnv *env, jclass clss, jint type_id, jint order);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_precision
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1precision
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_precision_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1precision_1long
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_precision
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1precision
+  (JNIEnv *env, jclass clss, jint type_id, jlong precision);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_offset
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1offset
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_offset
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1offset
+  (JNIEnv *env, jclass clss, jint type_id, jlong offset);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_pad
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1pad
+  (JNIEnv *, jclass, jint, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_pad
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1pad
+  (JNIEnv *env, jclass clss, jint type_id, jint lsb, jint msb);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_sign
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1sign
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_sign
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1sign
+  (JNIEnv *env, jclass clss, jint type_id, jint sign);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_fields_int
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1fields_1int
+  (JNIEnv *env, jclass clss, jint type_id, jintArray fields);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_fields
+ * Signature: (I[J)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1fields
+  (JNIEnv *env, jclass clss, jint type_id, jlongArray fields);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_fields
+ * Signature: (IJJJJJ)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1fields
+  (JNIEnv *env, jclass clss, jint type_id, jlong spos, jlong epos,
+  jlong esize, jlong mpos, jlong msize);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_ebias
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1ebias
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_ebias_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1ebias_1long
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_ebias
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1ebias
+  (JNIEnv *env, jclass clss, jint type_id, jlong ebias);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_norm
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1norm
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_norm
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1norm
+  (JNIEnv *env, jclass clss, jint type_id, jint norm);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_inpad
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1inpad
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_inpad
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1inpad
+  (JNIEnv *env, jclass clss, jint type_id, jint inpad);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_cset
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1cset
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_cset
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1cset
+  (JNIEnv *env, jclass clss, jint type_id, jint cset);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_strpad
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1strpad
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_strpad
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1strpad
+  (JNIEnv *env, jclass clss, jint type_id, jint strpad);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_nmembers
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1nmembers
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_name
+ * Signature: (II)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1name
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_index
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1index
+  (JNIEnv *, jclass, jint, jstring);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tget_member_type
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tget_1member_1type
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_offset
+ * Signature: (II)J
+ */
+JNIEXPORT jlong JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1offset
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_class
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1class
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tinsert
+ * Signature: (ILjava/lang/String;JI)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tinsert
+  (JNIEnv *env, jclass clss, jint type_id, jstring name, jlong offset, jint field_id);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tpack
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tpack
+  (JNIEnv *env, jclass clss, jint type_id);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tclose
+  (JNIEnv *env, jclass clss, jint type_id);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tvlen_create
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tvlen_1create
+  (JNIEnv *env, jclass clss, jint base_id);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_tag
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tset_1tag
+  (JNIEnv *env, jclass clss, jint type, jstring tag);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_tag
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1tag
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_super
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1super
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tenum_create
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tenum_1create
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert_int
+ * Signature: (ILjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1insert_1int
+  (JNIEnv *, jclass, jint, jstring, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert
+ * Signature: (ILjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1insert
+  (JNIEnv *, jclass, jint, jstring, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_nameof_int
+ * Signature: (I[I[Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1nameof_1int
+  (JNIEnv *env, jclass clss, jint type, jintArray value, jobjectArray name, 
+          jint size);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_nameof
+ * Signature: (I[BJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1nameof
+  (JNIEnv *, jclass, jint, jbyteArray, jlong size);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_valueof_int
+ * Signature: (ILjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1valueof_1int
+  (JNIEnv *, jclass, jint, jstring, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_valueof
+ * Signature: (ILjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tenum_1valueof
+  (JNIEnv *, jclass, jint, jstring, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_value_int
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1value_1int
+  (JNIEnv *, jclass, jint, jint, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_value
+ * Signature: (II[B)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1member_1value
+  (JNIEnv *, jclass, jint, jint, jbyteArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tarray_create
+ * Signature: (II[I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tarray_1create
+  (JNIEnv *, jclass, jint, jint, jintArray, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_ndims
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1array_1ndims
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims
+ * Signature: (I[I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1array_1dims
+  (JNIEnv *, jclass, jint, jintArray, jintArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tis_variable_str
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tis_1variable_1str
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_native_type
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1native_1type
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tdetect_class
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tdetect_1class
+  (JNIEnv *, jclass, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Topen2
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Topen2
+  (JNIEnv *, jclass, jint, jstring, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommit
+ * Signature: (ILjava/lang/String;IIII)V
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tcommit
+  (JNIEnv *, jclass, jint, jstring, jint, jint, jint, jint);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    _H5Tarray_create2
+ * Signature: (II[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5__1H5Tarray_1create2
+  (JNIEnv *, jclass, jint, jint, jlongArray);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims2
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Tget_1array_1dims2
+  (JNIEnv *, jclass, jint, jlongArray);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/source/c/hdf-java/h5util.c b/source/c/hdf-java/h5util.c
new file mode 100755
index 0000000..398e654
--- /dev/null
+++ b/source/c/hdf-java/h5util.c
@@ -0,0 +1,1830 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdf.ncsa.uiuc.edu/HDF5/doc/Copyright.html.  If you do not have     *
+ * access to either file, you may request a copy from hdfhelp at ncsa.uiuc.edu. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5util.h"
+#include "jni.h"
+
+    /* size of hyperslab buffer when a dataset is bigger than H5TOOLS_MALLOCSIZE */
+    hsize_t H5TOOLS_BUFSIZE = (32 * 1024 * 1024);  /* 32 MB */
+    int     H5TOOLS_TEXT_BLOCK = 16;  /* Number of elements on a line in a text export file */
+
+    JavaVM *jvm;
+    jobject visit_callback;
+
+int     h5str_dump_region_blocks(h5str_t *str, hid_t region, hid_t region_obj);
+int     h5str_dump_region_points(h5str_t *str, hid_t region, hid_t region_obj);
+int     h5str_is_zero(const void *_mem, size_t size);
+hid_t   h5str_get_native_type(hid_t type);
+hid_t   h5str_get_little_endian_type(hid_t type);
+hid_t   h5str_get_big_endian_type(hid_t type);
+htri_t  h5str_detect_vlen(hid_t tid);
+htri_t  h5str_detect_vlen_str(hid_t tid);
+int     h5tools_dump_simple_data(FILE *stream, hid_t container, hid_t type, void *_mem, hsize_t nelmts);
+int     h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hsize_t block_nelmts);
+int     render_bin_output_region_data_blocks(FILE *stream, hid_t region_id,
+            hid_t container, int ndims, hid_t type_id, hssize_t nblocks, hsize_t *ptdata);
+int     render_bin_output_region_blocks(FILE *stream, hid_t region_space,
+            hid_t region_id, hid_t container);
+int     render_bin_output_region_data_points(FILE *stream, hid_t region_space, hid_t region_id,
+            hid_t container, int ndims, hid_t type_id, hssize_t npoints, hsize_t *ptdata);
+int     render_bin_output_region_points(FILE *stream, hid_t region_space,
+            hid_t region_id, hid_t container);
+/** frees memory held by array of strings */
+void h5str_array_free(char **strs, size_t len) {
+    size_t i;
+
+    if (!strs || len <= 0)
+        return;
+
+    for (i = 0; i < len; i++) {
+        if (*(strs + i))
+            free(*(strs + i));
+    } /* for (i = 0; i < len; i++) */
+    free(strs);
+}
+
+/** allocate a new str with given length */
+void h5str_new(h5str_t *str, size_t len) {
+    if (str && len > 0) {
+        str->s = (char *) malloc(len);
+        str->max = len;
+        str->s[0] = '\0';
+    }
+}
+
+/** free string memory */
+void h5str_free(h5str_t *str) {
+    if (str && str->max > 0) {
+        free(str->s);
+        memset(str, 0, sizeof(h5str_t));
+    }
+}
+
+/** reset the max size of the string */
+void h5str_resize(h5str_t *str, size_t new_len) {
+    char *new_str;
+
+    if (!str || new_len <= 0 || str->max == new_len)
+        return;
+
+    new_str = (char *) malloc(new_len);
+    if (new_len > str->max) /* increase memory */
+        strcpy(new_str, str->s);
+    else {
+        strncpy(new_str, str->s, new_len - 1);
+        new_str[new_len - 1] = '\0'; /* strncpy does not terminate on truncation */
+    }
+
+    free(str->s);
+    str->s = new_str;
+    str->max = new_len;
+}
+
+/* appends a copy of the string pointed to by cstr to the h5str.
+ Return Value:
+ the char string pointed to by str->s
+ */
+char* h5str_append(h5str_t *str, const char* cstr) {
+    size_t len;
+
+    if (!str)
+        return NULL;
+    else if (!cstr)
+        return str->s;
+
+    len = strlen(str->s) + strlen(cstr);
+    while (len >= str->max) /* not enough to hold the new string, double the space */
+    {
+        h5str_resize(str, str->max * 2);
+    }
+
+    return strcat(str->s, cstr);
+}
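+/*
+ * Illustrative sketch (not part of the library): the h5str buffer grows
+ * automatically inside h5str_append, so a caller only manages new/free.
+ *
+ *     h5str_t s;
+ *     h5str_new(&s, 64);             -- allocate an initial 64-byte buffer
+ *     h5str_append(&s, "dims=");
+ *     h5str_append(&s, "(3, 4)");    -- s.s now holds "dims=(3, 4)"
+ *     h5str_free(&s);                -- release the buffer
+ */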
+
+/** print value of a data point into string.
+ Return Value:
+ On success, the total number of characters printed is returned.
+ On error, a negative number is returned.
+ */
+int h5str_sprintf(h5str_t *str, hid_t container, hid_t tid, void *ptr, int expand_data) {
+    unsigned char   tmp_uchar = 0;
+    char            tmp_char = 0;
+    unsigned short  tmp_ushort = 0;
+    short           tmp_short = 0;
+    unsigned int    tmp_uint = 0;
+    int             tmp_int = 0;
+    unsigned long   tmp_ulong = 0;
+    long            tmp_long = 0;
+    unsigned long long tmp_ullong = 0;
+    long long       tmp_llong = 0;
+    float           tmp_float = 0.0;
+    double          tmp_double = 0.0;
+    long double     tmp_ldouble = 0.0;
+    static char     fmt_llong[8], fmt_ullong[8];
+
+    hid_t           mtid = -1;
+    size_t          offset;
+    size_t          nll;
+    char           *this_str;
+    int             this_strlen;
+    int             i;
+    int             n;
+    int             len;
+    hvl_t          *vlptr;
+    char           *cptr = (char*) ptr;
+    unsigned char  *ucptr = (unsigned char*) ptr;
+    H5T_class_t     tclass = H5Tget_class(tid);
+    size_t          size = H5Tget_size(tid);
+    H5T_sign_t      nsign = H5Tget_sign(tid);
+    int bdata_print = 0;
+
+    if (!str || !ptr)
+        return -1;
+
+    /* Build default formats for long long types */
+    if (!fmt_llong[0]) {
+        sprintf(fmt_llong, "%%%sd", H5_PRINTF_LL_WIDTH);
+        sprintf(fmt_ullong, "%%%su", H5_PRINTF_LL_WIDTH);
+    }
+
+    this_str = NULL;
+    this_strlen = 0;
+
+	switch (tclass) {
+		case H5T_FLOAT:
+		    if (sizeof(float) == size) {
+		        /* if (H5Tequal(tid, H5T_NATIVE_FLOAT)) */
+				memcpy(&tmp_float, ptr, sizeof(float));
+				this_str = (char*) malloc(25);
+				sprintf(this_str, "%g", tmp_float);
+			}
+			else if (sizeof(double) == size) {
+                /* if (H5Tequal(tid, H5T_NATIVE_DOUBLE)) */
+				memcpy(&tmp_double, ptr, sizeof(double));
+				this_str = (char*) malloc(25);
+				sprintf(this_str, "%g", tmp_double);
+			}
+#if H5_SIZEOF_LONG_DOUBLE !=0
+            else if (sizeof(long double) == size) {
+                /* if (H5Tequal(tid, H5T_NATIVE_LDOUBLE)) */
+				memcpy(&tmp_ldouble, ptr, sizeof(long double));
+				this_str = (char*) malloc(27);
+				sprintf(this_str, "%Lf", tmp_ldouble);
+			}
+#endif
+			break;
+		case H5T_STRING:
+			{
+				char *tmp_str;
+				size = 0;
+
+				if (H5Tis_variable_str(tid)) {
+					tmp_str = *(char**) ptr;
+					if (tmp_str != NULL)
+						size = strlen(tmp_str);
+				}
+				else {
+					tmp_str = cptr;
+				}
+
+				/* Check for NULL pointer for string */
+				if (tmp_str == NULL) {
+					this_str = (char *) malloc(5);
+					strcpy(this_str, "NULL");
+				}
+				else {
+					if (size > 0) {
+						this_str = (char *) malloc(size + 1);
+						strncpy(this_str, tmp_str, size);
+						this_str[size] = '\0'; /* strncpy does not append the terminator */
+					}
+				}
+			}
+			break;
+		case H5T_INTEGER:
+		    if (sizeof(char) == size) {
+		        if(H5T_SGN_NONE == nsign) {
+		            /* if (H5Tequal(tid, H5T_NATIVE_UCHAR)) */
+		            memcpy(&tmp_uchar, ptr, sizeof(unsigned char));
+		            this_str = (char*) malloc(7);
+		            sprintf(this_str, "%u", tmp_uchar);
+		        }
+		        else {
+		            /* if (H5Tequal(tid, H5T_NATIVE_SCHAR)) */
+		            memcpy(&tmp_char, ptr, sizeof(char));
+		            this_str = (char*) malloc(7);
+		            sprintf(this_str, "%hhd", tmp_char);
+		        }
+		    }
+		    else if (sizeof(int) == size) {
+		        if(H5T_SGN_NONE == nsign) {
+		            /* if (H5Tequal(tid, H5T_NATIVE_UINT)) */
+		            memcpy(&tmp_uint, ptr, sizeof(unsigned int));
+		            this_str = (char*) malloc(14);
+		            sprintf(this_str, "%u", tmp_uint);
+		        }
+		        else {
+		            /* if (H5Tequal(tid, H5T_NATIVE_INT)) */
+		            memcpy(&tmp_int, ptr, sizeof(int));
+		            this_str = (char*) malloc(14);
+		            sprintf(this_str, "%d", tmp_int);
+		        }
+		    }
+			else if (sizeof(short) == size) {
+                if(H5T_SGN_NONE == nsign) {
+                    /* if (H5Tequal(tid, H5T_NATIVE_USHORT)) */
+                    memcpy(&tmp_ushort, ptr, sizeof(unsigned short));
+                    this_str = (char*) malloc(9);
+                    sprintf(this_str, "%u", tmp_ushort);
+                }
+                else {
+                    /* if (H5Tequal(tid, H5T_NATIVE_SHORT)) */
+                    memcpy(&tmp_short, ptr, sizeof(short));
+                    this_str = (char*) malloc(9);
+                    sprintf(this_str, "%d", tmp_short);
+                }
+			}
+			else if (sizeof(long) == size) {
+                if(H5T_SGN_NONE == nsign) {
+                    /* if (H5Tequal(tid, H5T_NATIVE_ULONG)) */
+                    memcpy(&tmp_ulong, ptr, sizeof(unsigned long));
+                    this_str = (char*) malloc(23);
+                    sprintf(this_str, "%lu", tmp_ulong);
+                }
+                else {
+                    /* if (H5Tequal(tid, H5T_NATIVE_LONG)) */
+                    memcpy(&tmp_long, ptr, sizeof(long));
+                    this_str = (char*) malloc(23);
+                    sprintf(this_str, "%ld", tmp_long);
+                }
+			}
+			else if (sizeof(long long) == size) {
+                if(H5T_SGN_NONE == nsign) {
+                    /* if (H5Tequal(tid, H5T_NATIVE_ULLONG)) */
+                    memcpy(&tmp_ullong, ptr, sizeof(unsigned long long));
+                    this_str = (char*) malloc(25);
+                    sprintf(this_str, fmt_ullong, tmp_ullong);
+                }
+                else {
+                    /* if (H5Tequal(tid, H5T_NATIVE_LLONG)) */
+                    memcpy(&tmp_llong, ptr, sizeof(long long));
+                    this_str = (char*) malloc(25);
+                    sprintf(this_str, fmt_llong, tmp_llong);
+                }
+			}
+			break;
+		case H5T_COMPOUND:
+			{
+                unsigned i;
+				n = H5Tget_nmembers(tid);
+				h5str_append(str, " {");
+
+				for (i = 0; i < n; i++) {
+					offset = H5Tget_member_offset(tid, i);
+					mtid = H5Tget_member_type(tid, i);
+					h5str_sprintf(str, container, mtid, cptr + offset, expand_data);
+					if (i < n - 1)
+						h5str_append(str, ", ");
+					H5Tclose(mtid);
+				}
+				h5str_append(str, "} ");
+			}
+			break;
+		case H5T_ENUM:
+			{
+				char enum_name[1024];
+				if (H5Tenum_nameof(tid, ptr, enum_name, sizeof enum_name) >= 0) {
+					h5str_append(str, enum_name);
+				}
+				else {
+					size_t i;
+					nll = H5Tget_size(tid);
+					this_str = (char*) malloc(4 * (nll + 1));
+
+					if (1 == nll) {
+						sprintf(this_str, "0x%02x", ucptr[0]);
+					}
+					else {
+						this_str[0] = '\0';
+						for (i = 0; i < nll; i++)
+							/* append to the end of the buffer instead of overwriting it */
+							sprintf(this_str + strlen(this_str), "%s%02x", i ? ":" : "", ucptr[i]);
+					}
+				}
+			}
+			break;
+		case H5T_REFERENCE:
+		    if (h5str_is_zero(ptr, size)) {
+		        h5str_append(str, "NULL");
+		    }
+		    else {
+		        if (H5R_DSET_REG_REF_BUF_SIZE == size) {
+		            /* if (H5Tequal(tid, H5T_STD_REF_DSETREG)) */
+		            /*
+		             * Dataset region reference --
+		             * show the type and the referenced object
+		             */
+		            char         ref_name[1024];
+                    hid_t        region_obj;
+                    hid_t        region;
+		            H5S_sel_type region_type;
+
+		            /* get name of the dataset the region reference points to using H5Rget_name */
+		            region_obj = H5Rdereference(container, H5R_DATASET_REGION, ptr);
+		            if (region_obj >= 0) {
+		                region = H5Rget_region(container, H5R_DATASET_REGION, ptr);
+		                if (region >= 0) {
+		                	if(expand_data) {
+								region_type = H5Sget_select_type(region);
+								if(region_type==H5S_SEL_POINTS) {
+									h5str_dump_region_points_data(str, region, region_obj);
+								}
+								else {
+									h5str_dump_region_blocks_data(str, region, region_obj);
+								}
+		                	}
+		                	else {
+								if(H5Rget_name(region_obj, H5R_DATASET_REGION, ptr, (char*)ref_name, 1024) >= 0) {
+									h5str_append(str, ref_name);
+								}
+
+								region_type = H5Sget_select_type(region);
+
+								if(region_type==H5S_SEL_POINTS) {
+									h5str_append(str, " REGION_TYPE POINT");
+									h5str_dump_region_points(str, region, region_obj);
+								}
+								else {
+									h5str_append(str, " REGION_TYPE BLOCK");
+									h5str_dump_region_blocks(str, region, region_obj);
+								}
+		                	}
+
+		                    H5Sclose(region);
+		                }
+		                H5Dclose(region_obj);
+		            }
+		        }
+		        else if (H5R_OBJ_REF_BUF_SIZE == size) {
+		            /* if (H5Tequal(tid, H5T_STD_REF_OBJ)) */
+                    /*
+                     * Object references -- show the type and OID of the referenced
+                     * object.
+                     */
+                    H5O_info_t  oi;
+                    hid_t       obj;
+
+		            this_str = (char*) malloc(64);
+                    obj = H5Rdereference(container, H5R_OBJECT, ptr);
+                    H5Oget_info(obj, &oi);
+
+                    /* Print object data and close object */
+                    sprintf(this_str, "%u-%lu", (unsigned) oi.type, (unsigned long) oi.addr); /* haddr_t may be wider than int */
+                    H5Oclose(obj);
+		        }
+			}
+			break;
+		case H5T_ARRAY:
+			{
+				int rank = 0;
+				hsize_t i, dims[H5S_MAX_RANK], total_elmts;
+				h5str_append(str, "[ ");
+
+				mtid = H5Tget_super(tid);
+				size = H5Tget_size(mtid);
+				rank = H5Tget_array_ndims(tid);
+
+				H5Tget_array_dims2(tid, dims);
+
+				total_elmts = 1;
+				for (i = 0; i < rank; i++)
+					total_elmts *= dims[i];
+
+				for (i = 0; i < total_elmts; i++) {
+					h5str_sprintf(str, container, mtid, cptr + i * size, expand_data);
+					if (i < total_elmts - 1)
+						h5str_append(str, ", ");
+				}
+				H5Tclose(mtid);
+				h5str_append(str, "] ");
+			}
+			break;
+		case H5T_VLEN:
+			{
+				unsigned int i;
+				mtid = H5Tget_super(tid);
+				size = H5Tget_size(mtid);
+
+				vlptr = (hvl_t *) cptr;
+
+				nll = vlptr->len;
+				for (i = 0; i < nll; i++) {
+					h5str_sprintf(str, container, mtid, ((char *) (vlptr->p)) + i * size, expand_data);
+					if (i + 1 < nll)
+						h5str_append(str, ", ");
+				}
+				H5Tclose(mtid);
+			}
+			break;
+
+		default:
+			{
+				/* All other types get printed as hexadecimal */
+				size_t i;
+				nll = H5Tget_size(tid);
+				this_str = (char*) malloc(4 * (nll + 1));
+
+				if (1 == nll) {
+					sprintf(this_str, "0x%02x", ucptr[0]);
+				}
+				else {
+					this_str[0] = '\0';
+					for (i = 0; i < nll; i++)
+						/* append to the end of the buffer instead of overwriting it */
+						sprintf(this_str + strlen(this_str), "%s%02x", i ? ":" : "", ucptr[i]);
+				}
+			}
+            break;
+	} /* end switch */
+
+    if (this_str) {
+        h5str_append(str, this_str);
+        this_strlen = strlen(str->s);
+        free(this_str);
+    }
+
+    return this_strlen;
+}
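+/*
+ * Sketch of a single h5str_sprintf call (illustrative; the container id
+ * is only consulted for reference types, so any value is assumed to be
+ * acceptable for a plain integer):
+ *
+ *     int     value = 42;
+ *     h5str_t s;
+ *     h5str_new(&s, 32);
+ *     h5str_sprintf(&s, -1, H5T_NATIVE_INT, &value, 0);   -- s.s == "42"
+ *     h5str_free(&s);
+ */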
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print the data values from a dataset referenced by region blocks.
+ *
+ * Description:
+ *      This is a special case subfunction to print the data in a region reference of type blocks.
+ *
+ * Return:
+ *      The function returns FAIL if there was an error, otherwise SUCCEED
+ *-------------------------------------------------------------------------
+ */
+int h5str_print_region_data_blocks(hid_t region_id,
+        h5str_t *str, int ndims, hid_t type_id, hssize_t nblocks, hsize_t *ptdata)
+{
+    hsize_t     *dims1 = NULL;
+    hsize_t     *start = NULL;
+    hsize_t     *count = NULL;
+    hsize_t      blkndx;
+    hsize_t      total_size[H5S_MAX_RANK];
+    unsigned int region_flags; /* buffer extent flags */
+    hsize_t      numelem;
+    hsize_t      numindex;
+    size_t       jndx;
+    unsigned     indx;
+    int          type_size;
+    int          ret_value = SUCCEED;
+    hid_t        mem_space = -1;
+    hid_t        sid1 = -1;
+    void        *region_buf = NULL;
+
+    /* Get the dataspace of the dataset */
+    if((sid1 = H5Dget_space(region_id)) >= 0) {
+
+        /* Allocate space for the dimension array */
+        if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+
+            /* find the dimensions of each data space from the block coordinates */
+            numelem = 1;
+            for (jndx = 0; jndx < ndims; jndx++) {
+                dims1[jndx] = ptdata[jndx + ndims] - ptdata[jndx] + 1;
+                numelem = dims1[jndx] * numelem;
+            }
+
+            /* Create dataspace for reading buffer */
+            if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) {
+                if((type_size = H5Tget_size(type_id)) > 0) {
+                    if((region_buf = malloc(type_size * (size_t)numelem)) != NULL) {
+                        /* Select (x , x , ..., x ) x (y , y , ..., y ) hyperslab for reading memory dataset */
+                        /*          1   2        n      1   2        n                                       */
+                        if((start = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+                            if((count = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+                                for (blkndx = 0; blkndx < nblocks; blkndx++) {
+                                    for (indx = 0; indx < ndims; indx++) {
+                                        start[indx] = ptdata[indx + blkndx * ndims * 2];
+                                        count[indx] = dims1[indx];
+                                    }
+
+                                    if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) {
+                                        if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) {
+                                            if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) {
+                                                for (numindex = 0; numindex < numelem; numindex++) {
+                                                    h5str_sprintf(str, region_id, type_id, ((char*)region_buf + numindex * type_size), 1);
+
+                                                    if (numindex + 1 < numelem)
+                                                        h5str_append(str, ", ");
+                                                } /* end for (numindex = 0; numindex < numelem; numindex++) */
+                                            } /* end if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) */
+                                        } /* end if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) */
+                                    } /* end if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) */
+                                } /* end for (blkndx = 0; blkndx < nblocks; blkndx++) */
+
+                                free(count);
+                            } /* end if((count = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+                            else
+                                ret_value = -1;
+
+                            free(start);
+                        } /* end if((start = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+                        else
+                            ret_value = -1;
+
+                        free(region_buf);
+                    } /* end if((region_buf = malloc(type_size * (size_t)numelem)) != NULL) */
+                    else
+                        ret_value = -1;
+                } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+                else
+                    ret_value = -1;
+
+                if(H5Sclose(mem_space) < 0)
+                    ret_value = -1;
+            } /* end if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) */
+            else
+                ret_value = -1;
+
+            free(dims1);
+        } /* end if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+        else
+            ret_value = -1;
+
+        if(H5Sclose(sid1) < 0)
+            ret_value = -1;
+    } /* end if((sid1 = H5Dget_space(region_id)) >= 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
+
+int h5str_dump_region_blocks_data(h5str_t *str, hid_t region, hid_t region_id)
+{
+    int        ret_value = 0;
+    hssize_t   nblocks;
+    hsize_t    alloc_size;
+    hsize_t   *ptdata;
+    hid_t      dtype = -1;
+    hid_t      type_id = -1;
+    char       tmp_str[256];
+    int        ndims = H5Sget_simple_extent_ndims(region);
+
+    /*
+     * This function fails if the region does not have blocks.
+     */
+    H5E_BEGIN_TRY {
+        nblocks = H5Sget_select_hyper_nblocks(region);
+    } H5E_END_TRY;
+
+    /* Print block information */
+    if (nblocks > 0) {
+        int i;
+
+        alloc_size = nblocks * ndims * 2 * sizeof(ptdata[0]);
+        if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+            ptdata = (hsize_t *) malloc((size_t) alloc_size);
+            H5Sget_select_hyper_blocklist(region, (hsize_t) 0,
+                    (hsize_t) nblocks, ptdata);
+
+            if((dtype = H5Dget_type(region_id)) >= 0) {
+                if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+
+                    h5str_print_region_data_blocks(region_id, str, ndims, type_id, nblocks, ptdata);
+
+                    if(H5Tclose(type_id) < 0)
+                        ret_value = -1;
+                } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+                else
+                    ret_value = -1;
+
+                if(H5Tclose(dtype) < 0)
+                    ret_value = -1;
+            } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+            else
+                ret_value = -1;
+            free(ptdata);
+        } /* if (alloc_size == (hsize_t)((size_t)alloc_size)) */
+    } /* if (nblocks > 0) */
+
+    return ret_value;
+}
+
+int h5str_dump_region_blocks(h5str_t *str, hid_t region, hid_t region_id)
+{
+    int        ret_value = 0;
+    hssize_t   nblocks;
+    hsize_t    alloc_size;
+    hsize_t   *ptdata;
+    hid_t      dtype = -1;
+    hid_t      type_id = -1;
+    char       tmp_str[256];
+    int        ndims = H5Sget_simple_extent_ndims(region);
+
+    /*
+     * This function fails if the region does not have blocks.
+     */
+    H5E_BEGIN_TRY {
+        nblocks = H5Sget_select_hyper_nblocks(region);
+    } H5E_END_TRY;
+
+    /* Print block information */
+    if (nblocks > 0) {
+        int i;
+
+        alloc_size = nblocks * ndims * 2 * sizeof(ptdata[0]);
+        if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+            ptdata = (hsize_t *) malloc((size_t) alloc_size);
+            H5Sget_select_hyper_blocklist(region, (hsize_t) 0,
+                    (hsize_t) nblocks, ptdata);
+
+            h5str_append(str, " {");
+            for (i = 0; i < nblocks; i++) {
+                int j;
+
+                h5str_append(str, " ");
+
+                /* Start coordinates and opposite corner */
+                for (j = 0; j < ndims; j++) {
+                    tmp_str[0] = '\0';
+                    sprintf(tmp_str, "%s%lu", j ? "," : "(",
+                            (unsigned long) ptdata[i * 2 * ndims + j]);
+                    h5str_append(str, tmp_str);
+                }
+
+                for (j = 0; j < ndims; j++) {
+                    tmp_str[0] = '\0';
+                    sprintf(tmp_str, "%s%lu", j ? "," : ")-(",
+                            (unsigned long) ptdata[i * 2 * ndims + j + ndims]);
+                    h5str_append(str, tmp_str);
+                }
+                h5str_append(str, ") ");
+                tmp_str[0] = '\0';
+            }
+            h5str_append(str, " }");
+
+            free(ptdata);
+        } /* if (alloc_size == (hsize_t)((size_t)alloc_size)) */
+    } /* if (nblocks > 0) */
+
+    return ret_value;
+}
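+/*
+ * For a 2-D region selection with two blocks the text appended above
+ * looks like:
+ *
+ *      { (0,0)-(1,3)  (4,0)-(5,3)  }
+ */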
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print the data values from a dataset referenced by region points.
+ *
+ * Description:
+ *      This is a special case subfunction to print the data in a region reference of type points.
+ *
+ * Return:
+ *      The function returns FAIL on error, otherwise SUCCEED
+ *-------------------------------------------------------------------------
+ */
+int
+h5str_print_region_data_points(hid_t region_space, hid_t region_id,
+        h5str_t *str, int ndims, hid_t type_id, hssize_t npoints, hsize_t *ptdata)
+{
+    hsize_t        *dims1 = NULL;
+    hsize_t         total_size[H5S_MAX_RANK];
+    size_t          jndx;
+    unsigned        indx;
+    int             type_size;
+    int             ret_value = SUCCEED;
+    unsigned int    region_flags; /* buffer extent flags */
+    hid_t           mem_space = -1;
+    void           *region_buf = NULL;
+    char            tmp_str[256];
+
+    /* Allocate space for the dimension array */
+    if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+
+        dims1[0] = npoints;
+
+        /* Create dataspace for reading buffer */
+        if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) {
+
+            if((type_size = H5Tget_size(type_id)) > 0) {
+
+                if((region_buf = malloc(type_size * (size_t)npoints)) != NULL) {
+
+                    if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) {
+
+                        for (jndx = 0; jndx < npoints; jndx++) {
+                            if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) {
+
+                                h5str_sprintf(str, region_id, type_id, ((char*)region_buf + jndx * type_size), 1);
+
+                                if (jndx + 1 < npoints)
+                                    h5str_append(str, ", ");
+
+                            } /* end if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) */
+                        } /* end for (jndx = 0; jndx < npoints; jndx++) */
+                    } /* end if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) */
+                    else
+                        ret_value = -1;
+
+                    free(region_buf);
+                } /* end if((region_buf = malloc(type_size * (size_t)npoints)) != NULL) */
+                else
+                    ret_value = -1;
+            } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+            else
+                ret_value = -1;
+
+            if(H5Sclose(mem_space) < 0)
+                ret_value = -1;
+        } /* end if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) */
+        else
+            ret_value = -1;
+        free(dims1);
+    } /* end if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
+
+int h5str_dump_region_points_data(h5str_t *str, hid_t region, hid_t region_id)
+{
+    int        ret_value = 0;
+    hssize_t   npoints;
+    hsize_t    alloc_size;
+    hsize_t   *ptdata;
+    char       tmp_str[256];
+    hid_t      dtype = -1;
+    hid_t      type_id = -1;
+    int        ndims = H5Sget_simple_extent_ndims(region);
+
+    /*
+     * This function fails if the region does not have points.
+     */
+    H5E_BEGIN_TRY {
+        npoints = H5Sget_select_elem_npoints(region);
+    } H5E_END_TRY;
+
+    /* Print point information */
+    if (npoints > 0) {
+        int i;
+
+        alloc_size = npoints * ndims * sizeof(ptdata[0]);
+        if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+            ptdata = (hsize_t *) malloc((size_t) alloc_size);
+            H5Sget_select_elem_pointlist(region, (hsize_t) 0,
+                    (hsize_t) npoints, ptdata);
+
+            if((dtype = H5Dget_type(region_id)) >= 0) {
+                if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+
+                    h5str_print_region_data_points(region, region_id,
+                            str, ndims, type_id, npoints, ptdata);
+
+                    if(H5Tclose(type_id) < 0)
+                        ret_value = -1;
+                } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+                else
+                    ret_value = -1;
+
+                if(H5Tclose(dtype) < 0)
+                    ret_value = -1;
+            } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+            else
+                ret_value = -1;
+            free(ptdata);
+        }
+    }
+
+    return ret_value;
+}
+
+int h5str_dump_region_points(h5str_t *str, hid_t region, hid_t region_id)
+{
+    int        ret_value = 0;
+    hssize_t   npoints;
+    hsize_t    alloc_size;
+    hsize_t   *ptdata;
+    char       tmp_str[256];
+    hid_t      dtype = -1;
+    hid_t      type_id = -1;
+    int        ndims = H5Sget_simple_extent_ndims(region);
+
+    /*
+     * This function fails if the region does not have points.
+     */
+    H5E_BEGIN_TRY {
+        npoints = H5Sget_select_elem_npoints(region);
+    } H5E_END_TRY;
+
+    /* Print point information */
+    if (npoints > 0) {
+        int i;
+
+        alloc_size = npoints * ndims * sizeof(ptdata[0]);
+        if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+            ptdata = (hsize_t *) malloc((size_t) alloc_size);
+            H5Sget_select_elem_pointlist(region, (hsize_t) 0,
+                    (hsize_t) npoints, ptdata);
+
+            h5str_append(str, " {");
+            for (i = 0; i < npoints; i++) {
+                int j;
+
+                h5str_append(str, " ");
+
+                for (j = 0; j < ndims; j++) {
+                    tmp_str[0] = '\0';
+                    sprintf(tmp_str, "%s%lu", j ? "," : "(",
+                            (unsigned long) (ptdata[i * ndims + j]));
+                    h5str_append(str, tmp_str);
+                }
+
+                h5str_append(str, ") ");
+            }
+            h5str_append(str, " }");
+
+            free(ptdata);
+        }
+    }
+
+    return ret_value;
+}
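+/*
+ * For a 2-D region selection with two points the text appended above
+ * looks like:
+ *
+ *      { (0,0)  (2,3)  }
+ */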
+
+int h5str_is_zero(const void *_mem, size_t size) {
+    const unsigned char *mem = (const unsigned char *) _mem;
+
+    while (size-- > 0)
+        if (mem[size])
+            return 0;
+
+    return 1;
+}
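+/*
+ * e.g. h5str_is_zero("\0\0\0", 3) returns 1 and h5str_is_zero("a", 1)
+ * returns 0; used above to detect unset (all-zero) reference buffers.
+ */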
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_detect_vlen_str
+ *
+ * Purpose: Recursive check for variable length string of a datatype.
+ *
+ * Return:
+ *    TRUE : type contains a variable length string
+ *    FALSE : type doesn't contain any variable length string
+ *    Negative value: an error occurred
+ *
+ *-------------------------------------------------------------------------
+ */
+htri_t
+h5str_detect_vlen_str(hid_t tid)
+{
+    H5T_class_t tclass = H5T_NO_CLASS;
+    htri_t ret = 0;
+
+    ret = H5Tis_variable_str(tid);
+    if((ret == 1) || (ret < 0))
+        goto done;
+
+    tclass = H5Tget_class(tid);
+    if(tclass == H5T_ARRAY || tclass == H5T_VLEN) {
+        hid_t btid = H5Tget_super(tid);
+
+        if(btid < 0) {
+            ret = (htri_t)btid;
+            goto done;
+        }
+        ret = h5str_detect_vlen_str(btid);
+        H5Tclose(btid); /* close the super type on every path, not just on error */
+        if((ret == 1) || (ret < 0))
+            goto done;
+    }
+    else if(tclass == H5T_COMPOUND) {
+        int i = 0;
+        int n = H5Tget_nmembers(tid);
+
+        if(n < 0) {
+            ret = (htri_t)n; /* propagate the H5Tget_nmembers error */
+            goto done;
+        }
+
+        for(i = 0; i < n; i++) {
+            hid_t mtid = H5Tget_member_type(tid, i);
+
+            ret = h5str_detect_vlen_str(mtid);
+            if((ret == 1) || (ret < 0)) {
+                H5Tclose(mtid);
+                goto done;
+            }
+            H5Tclose(mtid);
+        }
+    }
+
+done:
+    return ret;
+}
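+/*
+ * Illustrative sketch: a compound type holding one variable-length
+ * string member is detected (ids and the member name are made up):
+ *
+ *     hid_t vstr = H5Tcopy(H5T_C_S1);
+ *     H5Tset_size(vstr, H5T_VARIABLE);
+ *     hid_t cmp = H5Tcreate(H5T_COMPOUND, sizeof(char *));
+ *     H5Tinsert(cmp, "name", 0, vstr);
+ *     h5str_detect_vlen_str(cmp);    -- returns 1
+ */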
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_get_native_type
+ *
+ * Purpose: Wrapper around H5Tget_native_type() to work around
+ *          problems with bitfields.
+ *
+ * Return: Success:    datatype ID
+ *         Failure:    FAIL
+ *-------------------------------------------------------------------------
+ */
+hid_t h5str_get_native_type(hid_t type)
+{
+    hid_t p_type;
+    H5T_class_t type_class;
+
+    type_class = H5Tget_class(type);
+    if(type_class==H5T_BITFIELD)
+        p_type=H5Tcopy(type);
+    else
+        p_type = H5Tget_native_type(type,H5T_DIR_DEFAULT);
+
+    return(p_type);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_get_little_endian_type
+ *
+ * Purpose: Get a little endian type from a file type
+ *
+ * Return: Success:    datatype ID
+ *         Failure:    FAIL
+ *-------------------------------------------------------------------------
+ */
+hid_t h5str_get_little_endian_type(hid_t tid)
+{
+    hid_t       p_type=-1;
+    H5T_class_t type_class;
+    size_t      size;
+    H5T_sign_t  sign;
+
+    type_class = H5Tget_class(tid);
+    size       = H5Tget_size(tid);
+    sign       = H5Tget_sign(tid);
+
+    switch( type_class )
+    {
+    case H5T_INTEGER:
+    {
+        if ( size == 1 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I8LE);
+        else if ( size == 2 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I16LE);
+        else if ( size == 4 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I32LE);
+        else if ( size == 8 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I64LE);
+        else if ( size == 1 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U8LE);
+        else if ( size == 2 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U16LE);
+        else if ( size == 4 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U32LE);
+        else if ( size == 8 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U64LE);
+    }
+    break;
+
+    case H5T_FLOAT:
+        if ( size == 4)
+            p_type=H5Tcopy(H5T_IEEE_F32LE);
+        else if ( size == 8)
+            p_type=H5Tcopy(H5T_IEEE_F64LE);
+        break;
+
+    case H5T_TIME:
+    case H5T_BITFIELD:
+    case H5T_OPAQUE:
+    case H5T_STRING:
+    case H5T_COMPOUND:
+    case H5T_REFERENCE:
+    case H5T_ENUM:
+    case H5T_VLEN:
+    case H5T_ARRAY:
+        break;
+
+    default:
+        break;
+
+    }
+
+    return(p_type);
+}
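+/*
+ * Example mapping: a 4-byte signed big-endian file integer
+ * (H5T_STD_I32BE) comes back as a copy of H5T_STD_I32LE; classes with
+ * no fixed-endianness variant fall through and return -1.
+ */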
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_get_big_endian_type
+ *
+ * Purpose: Get a big endian type from a file type
+ *
+ * Return: Success:    datatype ID
+ *         Failure:    FAIL
+ *-------------------------------------------------------------------------
+ */
+hid_t h5str_get_big_endian_type(hid_t tid)
+{
+    hid_t       p_type=-1;
+    H5T_class_t type_class;
+    size_t      size;
+    H5T_sign_t  sign;
+
+    type_class = H5Tget_class(tid);
+    size       = H5Tget_size(tid);
+    sign       = H5Tget_sign(tid);
+
+    switch( type_class )
+    {
+    case H5T_INTEGER:
+    {
+        if ( size == 1 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I8BE);
+        else if ( size == 2 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I16BE);
+        else if ( size == 4 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I32BE);
+        else if ( size == 8 && sign == H5T_SGN_2)
+            p_type=H5Tcopy(H5T_STD_I64BE);
+        else if ( size == 1 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U8BE);
+        else if ( size == 2 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U16BE);
+        else if ( size == 4 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U32BE);
+        else if ( size == 8 && sign == H5T_SGN_NONE)
+            p_type=H5Tcopy(H5T_STD_U64BE);
+    }
+    break;
+
+    case H5T_FLOAT:
+        if ( size == 4)
+            p_type=H5Tcopy(H5T_IEEE_F32BE);
+        else if ( size == 8)
+            p_type=H5Tcopy(H5T_IEEE_F64BE);
+        break;
+
+    case H5T_TIME:
+    case H5T_BITFIELD:
+    case H5T_OPAQUE:
+    case H5T_STRING:
+    case H5T_COMPOUND:
+    case H5T_REFERENCE:
+    case H5T_ENUM:
+    case H5T_VLEN:
+    case H5T_ARRAY:
+        break;
+
+    default:
+        break;
+
+    }
+
+    return(p_type);
+}
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_detect_vlen
+ *
+ * Purpose: Recursive check for any variable length data in given type.
+ *
+ * Return:
+ *    1 : type contains variable length data
+ *    0 : type doesn't contain any variable length data
+ *    Negative value: an error occurred
+ *-------------------------------------------------------------------------
+ */
+htri_t
+h5str_detect_vlen(hid_t tid)
+{
+    htri_t ret;
+
+    /* recursively detect any vlen data values in the type (compound, array ...) */
+    ret = H5Tdetect_class(tid, H5T_VLEN);
+    if((ret == 1) || (ret < 0))
+        goto done;
+
+    /* recursively detect any vlen string in the type (compound, array ...) */
+    ret = h5str_detect_vlen_str(tid);
+    if((ret == 1) || (ret < 0))
+        goto done;
+
+done:
+    return ret;
+}
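+/*
+ * Typical caller pattern (sketch; mtid, mspace and buf are illustrative
+ * names): variable-length data read with a type for which this check
+ * returns 1 must be reclaimed separately, e.g.
+ *
+ *     if (h5str_detect_vlen(mtid) > 0)
+ *         H5Dvlen_reclaim(mtid, mspace, H5P_DEFAULT, buf);
+ */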
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_render_bin_output
+ *
+ * Purpose: Write one element of memory buffer to a binary file stream
+ *
+ * Return: Success:    SUCCEED
+ *         Failure:    FAIL
+ *-------------------------------------------------------------------------
+ */
+int h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hsize_t block_nelmts)
+{
+    int                ret_value = 0;
+    unsigned char     *mem  = (unsigned char*)_mem;
+    size_t             size;   /* datum size */
+    hsize_t            block_index;
+    H5T_class_t        type_class;
+
+    if((size = H5Tget_size(tid)) > 0) {
+
+        if((type_class = H5Tget_class(tid)) >= 0) {
+
+            switch (type_class) {
+                case H5T_INTEGER:
+                case H5T_FLOAT:
+                case H5T_ENUM:
+                    block_index = block_nelmts * size;
+                    while(block_index > 0) {
+                        size_t bytes_in        = 0;    /* # of bytes to write  */
+                        size_t bytes_wrote     = 0;    /* # of bytes written   */
+
+                        if(block_index > sizeof(size_t))
+                            bytes_in = sizeof(size_t);
+                        else
+                            bytes_in = (size_t)block_index;
+
+                        bytes_wrote = fwrite(mem, 1, bytes_in, stream);
+
+                        if(bytes_wrote != bytes_in || (0 == bytes_wrote && ferror(stream))) {
+                            ret_value = -1;
+                            break;
+                        }
+
+                        block_index -= (hsize_t)bytes_wrote;
+                        mem = mem + bytes_wrote;
+                    }
+                    break;
+                case H5T_STRING:
+                {
+                    unsigned int    i;
+                    H5T_str_t       pad;
+                    char           *s;
+                    unsigned char   tempuchar;
+
+                    pad = H5Tget_strpad(tid);
+
+                    for (block_index = 0; block_index < block_nelmts; block_index++) {
+                        mem = ((unsigned char*)_mem) + block_index * size;
+
+                        if (H5Tis_variable_str(tid)) {
+                            s = *(char**) mem;
+                            if (s != NULL)
+                                size = strlen(s);
+                        }
+                        else {
+                            s = (char *) mem;
+                        }
+                        for (i = 0; i < size && (s[i] || pad != H5T_STR_NULLTERM); i++) {
+                            memcpy(&tempuchar, &s[i], sizeof(unsigned char));
+                            if (1 != fwrite(&tempuchar, sizeof(unsigned char), 1, stream)) {
+                                ret_value = -1;
+                                break;
+                            }
+                        } /* i */
+                        if(ret_value < 0)
+                            break;
+                    } /* for (block_index = 0; block_index < block_nelmts; block_index++) */
+                }
+                break;
+                case H5T_COMPOUND:
+                {
+                    unsigned j;
+                    hid_t    memb;
+                    unsigned nmembs;
+                    size_t   offset;
+
+                    nmembs = H5Tget_nmembers(tid);
+
+                    for (block_index = 0; block_index < block_nelmts; block_index++) {
+                        mem = ((unsigned char*)_mem) + block_index * size;
+                        for (j = 0; j < nmembs; j++) {
+                            offset = H5Tget_member_offset(tid, j);
+                            memb   = H5Tget_member_type(tid, j);
+
+                            if (h5str_render_bin_output(stream, container, memb, mem + offset, 1) < 0) {
+                                H5Tclose(memb);
+                                ret_value = -1;
+                                break;
+                            }
+
+                            H5Tclose(memb);
+                        }
+                        if(ret_value < 0)
+                            break;
+                    }
+                }
+                break;
+                case H5T_ARRAY:
+                {
+                    int     k, ndims;
+                    hsize_t i, dims[H5S_MAX_RANK], temp_nelmts, nelmts;
+                    hid_t   memb;
+
+                    /* get the array's base datatype for each element */
+                    memb = H5Tget_super(tid);
+                    ndims = H5Tget_array_ndims(tid);
+                    H5Tget_array_dims2(tid, dims);
+
+                    /* calculate the number of array elements */
+                    for (k = 0, nelmts = 1; k < ndims; k++) {
+                        temp_nelmts = nelmts;
+                        temp_nelmts *= dims[k];
+                        nelmts = (size_t) temp_nelmts;
+                    }
+
+                    for (block_index = 0; block_index < block_nelmts; block_index++) {
+                        mem = ((unsigned char*)_mem) + block_index * size;
+                        /* dump the array element */
+                        if (h5str_render_bin_output(stream, container, memb, mem, nelmts) < 0) {
+                            ret_value = -1;
+                            break;
+                        }
+                    }
+                    H5Tclose(memb);
+                }
+                break;
+                case H5T_VLEN:
+                {
+                    unsigned int i;
+                    hsize_t      nelmts;
+                    hid_t        memb;
+
+                    /* get the VL sequences's base datatype for each element */
+                    memb = H5Tget_super(tid);
+
+                    for (block_index = 0; block_index < block_nelmts; block_index++) {
+                        mem = ((unsigned char*)_mem) + block_index * size;
+                        /* Get the number of sequence elements */
+                        nelmts = ((hvl_t *) mem)->len;
+
+                        /* dump the array element */
+                        if (h5str_render_bin_output(stream, container, memb, ((char *) (((hvl_t *) mem)->p)), nelmts) < 0) {
+                            ret_value = -1;
+                            break;
+                        }
+                    }
+                    H5Tclose(memb);
+                }
+                break;
+                case H5T_REFERENCE:
+                {
+                    if (H5Tequal(tid, H5T_STD_REF_DSETREG)) {
+                        /* region data */
+                        hid_t   region_id, region_space;
+                        H5S_sel_type region_type;
+
+                        for (block_index = 0; block_index < block_nelmts; block_index++) {
+                            mem = ((unsigned char*)_mem) + block_index * size;
+                            region_id = H5Rdereference(container, H5R_DATASET_REGION, mem);
+                            if (region_id >= 0) {
+                                region_space = H5Rget_region(container, H5R_DATASET_REGION, mem);
+                                if (region_space >= 0) {
+                                    region_type = H5Sget_select_type(region_space);
+                                    if(region_type == H5S_SEL_POINTS)
+                                        ret_value = render_bin_output_region_points(stream, region_space, region_id, container);
+                                    else
+                                        ret_value = render_bin_output_region_blocks(stream, region_space, region_id, container);
+                                    H5Sclose(region_space);
+                                } /* end if (region_space >= 0) */
+                                H5Dclose(region_id);
+                            } /* end if (region_id >= 0) */
+                            if(ret_value < 0)
+                                break;
+                        }
+                    }
+                    else if (H5Tequal(tid, H5T_STD_REF_OBJ)) {
+                        ; /* object references carry no data to render */
+                    }
+                }
+                break;
+                default:
+                    for (block_index = 0; block_index < block_nelmts; block_index++) {
+                        mem = ((unsigned char*)_mem) + block_index * size;
+                        if (size != fwrite(mem, sizeof(char), size, stream)) {
+                            ret_value = -1;
+                            break;
+                        }
+                    }
+                    break;
+            }
+        } /* end if((type_class = H5Tget_class(tid)) >= 0) */
+        else
+            ret_value = -1;
+    } /* end if((size = H5Tget_size(tid)) > 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
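+/*
+ * Sketch of a caller (names are illustrative): dump nelmts elements that
+ * were already read from dataset did with memory type mtid into buf.
+ *
+ *     FILE *fp = fopen("dump.bin", "wb");
+ *     if (fp) {
+ *         h5str_render_bin_output(fp, did, mtid, buf, nelmts);
+ *         fclose(fp);
+ *     }
+ */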
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print the data values from a dataset referenced by region blocks.
+ *
+ * Description:
+ *      This is a special case subfunction to print the data in a region reference of type blocks.
+ *
+ * Return:
+ *      The function returns FAIL if there was an error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+int render_bin_output_region_data_blocks(FILE *stream, hid_t region_id,
+    hid_t container, int ndims, hid_t type_id, hssize_t nblocks, hsize_t *ptdata)
+{
+    hsize_t     *dims1 = NULL;
+    hsize_t     *start = NULL;
+    hsize_t     *count = NULL;
+    hsize_t      numelem;
+    hsize_t      numindex;
+    hsize_t      total_size[H5S_MAX_RANK];
+    int          jndx;
+    int          type_size;
+    hid_t        mem_space = -1;
+    void        *region_buf = NULL;
+    int          blkndx;
+    hid_t        sid1 = -1;
+    int          ret_value = SUCCEED;
+
+    /* Get the dataspace of the dataset */
+    if((sid1 = H5Dget_space(region_id)) >= 0) {
+        /* Allocate space for the dimension array */
+        if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+            /* find the dimensions of each data space from the block coordinates */
+            numelem = 1;
+            for (jndx = 0; jndx < ndims; jndx++) {
+                dims1[jndx] = ptdata[jndx + ndims] - ptdata[jndx] + 1;
+                numelem = dims1[jndx] * numelem;
+            }
+
+            /* Create dataspace for reading buffer */
+            if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) {
+                if((type_size = H5Tget_size(type_id)) > 0) {
+                    if((region_buf = malloc(type_size * (size_t)numelem)) != NULL) {
+                        /* Select (x , x , ..., x ) x (y , y , ..., y ) hyperslab for reading memory dataset */
+                        /*          1   2        n      1   2        n                                       */
+                        if((start = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+                            if((count = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+                                for (blkndx = 0; blkndx < nblocks; blkndx++) {
+                                    for (jndx = 0; jndx < ndims; jndx++) {
+                                        start[jndx] = ptdata[jndx + blkndx * ndims * 2];
+                                        count[jndx] = dims1[jndx];
+                                    }
+
+                                    if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) {
+                                        if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) {
+                                            if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) {
+                                                ret_value = h5str_render_bin_output(stream, container, type_id, (char*)region_buf, numelem);
+                                            } /* end if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) */
+                                            else {
+                                                ret_value = -1;
+                                                break;
+                                            }
+                                        } /* end if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) */
+                                        else {
+                                            ret_value = -1;
+                                            break;
+                                        }
+                                    } /* end if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) */
+                                    else {
+                                        ret_value = -1;
+                                        break;
+                                    }
+                                    /* Render the region data element end */
+                                } /* end for (blkndx = 0; blkndx < nblocks; blkndx++) */
+
+                                free(count);
+                            } /* end if((count = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+                            else
+                                ret_value = -1;
+                            free(start);
+                        } /* end if((start = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+                        else
+                            ret_value = -1;
+                        free(region_buf);
+                    } /* end if((region_buf = malloc(type_size * (size_t)numelem)) != NULL) */
+                    else
+                        ret_value = -1;
+                } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+                else
+                    ret_value = -1;
+
+                if(H5Sclose(mem_space) < 0)
+                    ret_value = -1;
+            } /* end if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) */
+            else
+                ret_value = -1;
+            free(dims1);
+        } /* end if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+        else
+            ret_value = -1;
+        if(H5Sclose(sid1) < 0)
+            ret_value = -1;
+    } /* end if((sid1 = H5Dget_space(region_id)) >= 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print some values from a dataset referenced by region blocks.
+ *
+ * Description:
+ *      This is a special case subfunction to dump a region reference using blocks.
+ *
+ * Return:
+ *      The function returns FAIL on error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+int render_bin_output_region_blocks(FILE *stream, hid_t region_space, hid_t region_id, hid_t container)
+{
+    int          ret_value = SUCCEED;
+    hssize_t     nblocks;
+    hsize_t      alloc_size;
+    hsize_t     *ptdata = NULL;
+    int          ndims;
+    hid_t        dtype;
+    hid_t        type_id;
+
+    if((nblocks = H5Sget_select_hyper_nblocks(region_space)) > 0) {
+        /* Print block information */
+        if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) {
+            alloc_size = nblocks * ndims * 2 * sizeof(ptdata[0]);
+            if((ptdata = (hsize_t*) malloc((size_t) alloc_size)) != NULL) {
+                if(H5Sget_select_hyper_blocklist(region_space, (hsize_t) 0, (hsize_t) nblocks, ptdata) >= 0) {
+                    if((dtype = H5Dget_type(region_id)) >= 0) {
+                        if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+                            ret_value = render_bin_output_region_data_blocks(stream, region_id, container, ndims,
+                                    type_id, nblocks, ptdata);
+
+                            if(H5Tclose(type_id) < 0)
+                                ret_value = -1;
+                        } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+                        else
+                            ret_value = -1;
+
+                        if(H5Tclose(dtype) < 0)
+                            ret_value = -1;
+                    } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+                    else
+                        ret_value = -1;
+                } /* end if(H5Sget_select_hyper_blocklist(region_space, (hsize_t) 0, (hsize_t) nblocks, ptdata) >= 0) */
+                else
+                    ret_value = -1;
+
+                free(ptdata);
+            } /* end if((ptdata = (hsize_t*) malloc((size_t) alloc_size)) != NULL) */
+            else
+                ret_value = -1;
+        } /* end if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) */
+        else
+            ret_value = -1;
+    } /* end if((nblocks = H5Sget_select_hyper_nblocks(region_space)) > 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
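+
+/*
+ * Illustrative caller sketch (not part of this file; `dset` is a
+ * hypothetical dataset id and `ref` a region reference read from it):
+ *
+ *     hdset_reg_ref_t ref;
+ *     hid_t obj, sp;
+ *     obj = H5Rdereference(dset, H5R_DATASET_REGION, ref);
+ *     sp  = H5Rget_region(dset, H5R_DATASET_REGION, ref);
+ *     if (H5Sget_select_type(sp) == H5S_SEL_HYPERSLABS)
+ *         render_bin_output_region_blocks(stdout, sp, obj, dset);
+ *     H5Sclose(sp);
+ *     H5Dclose(obj);
+ */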
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print the data values from a dataset referenced by region points.
+ *
+ * Description:
+ *      This is a special case subfunction to print the data in a region reference of type points.
+ *
+ * Return:
+ *      The function returns FAIL on error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+int render_bin_output_region_data_points(FILE *stream, hid_t region_space, hid_t region_id,
+        hid_t container, int ndims, hid_t type_id, hssize_t npoints, hsize_t *ptdata)
+{
+    hsize_t *dims1 = NULL;
+    int      type_size;
+    hid_t    mem_space = -1;
+    void    *region_buf = NULL;
+    int      ret_value = SUCCEED;
+
+    if((type_size = H5Tget_size(type_id)) > 0) {
+        if((region_buf = malloc(type_size * (size_t)npoints)) != NULL) {
+            /* Allocate space for the dimension array */
+            if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) {
+                dims1[0] = npoints;
+                if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) {
+                    if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) {
+                        if(H5Sget_simple_extent_dims(region_space, dims1, NULL) >= 0) {
+                            ret_value = h5str_render_bin_output(stream, container, type_id, (char*)region_buf, npoints);
+                        } /* end if(H5Sget_simple_extent_dims(region_space, dims1, NULL) >= 0) */
+                        else
+                            ret_value = -1;
+                    } /* end if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) */
+                    else
+                        ret_value = -1;
+                } /* end if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) */
+                else
+                    ret_value = -1;
+
+                free(dims1);
+            } /* end if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+            else
+                ret_value = -1;
+            free(region_buf);
+        } /* end if((region_buf = malloc(type_size * (size_t)npoints)) != NULL) */
+        else
+            ret_value = -1;
+
+        if(mem_space >= 0 && H5Sclose(mem_space) < 0)
+            ret_value = -1;
+    } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
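+
+/*
+ * Note (illustrative): for a selection of, say, npoints = 3 in a 2-D
+ * dataset, the code above builds a 1-D memory space of 3 elements, so
+ * H5Dread gathers exactly the selected elements into region_buf,
+ * whatever the dataset's rank.
+ */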
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print some values from a dataset referenced by region points.
+ *
+ * Description:
+ *      This is a special case subfunction to dump a region reference using points.
+ *
+ * Return:
+ *      The function returns FAIL on error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+int render_bin_output_region_points(FILE *stream, hid_t region_space, hid_t region_id, hid_t container)
+{
+    int      ret_value = SUCCEED;
+    hssize_t npoints;
+    hsize_t  alloc_size;
+    hsize_t *ptdata;
+    int      ndims;
+    hid_t    dtype;
+    hid_t    type_id;
+
+    if((npoints = H5Sget_select_elem_npoints(region_space)) > 0) {
+        /* Allocate space for the dimension array */
+        if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) {
+            alloc_size = npoints * ndims * sizeof(ptdata[0]);
+            if(NULL != (ptdata = (hsize_t *)malloc((size_t) alloc_size))) {
+                if(H5Sget_select_elem_pointlist(region_space, (hsize_t) 0, (hsize_t) npoints, ptdata) >= 0) {
+                    if((dtype = H5Dget_type(region_id)) >= 0) {
+                        if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+                            ret_value = render_bin_output_region_data_points(stream, region_space, region_id,
+                                    container, ndims, type_id, npoints, ptdata);
+
+                            if(H5Tclose(type_id) < 0)
+                                ret_value = -1;
+                        } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+                        else
+                            ret_value = -1;
+
+                        if(H5Tclose(dtype) < 0)
+                            ret_value = -1;
+                    } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+                    else
+                        ret_value = -1;
+                } /* end if(H5Sget_select_elem_pointlist(region_space, (hsize_t) 0, (hsize_t) npoints, ptdata) >= 0) */
+                else
+                    ret_value = -1;
+
+                free(ptdata);
+            } /* end if(NULL != (ptdata = (hsize_t *)malloc((size_t) alloc_size))) */
+            else
+                ret_value = -1;
+        } /* end if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) */
+        else
+            ret_value = -1;
+
+    } /* end if((npoints = H5Sget_select_elem_npoints(region_space)) > 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
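+
+/*
+ * Illustrative sketch: the point-selection counterpart of the block
+ * example above (same hypothetical `sp`, `obj` and `dset`):
+ *
+ *     if (H5Sget_select_type(sp) == H5S_SEL_POINTS)
+ *         render_bin_output_region_points(stdout, sp, obj, dset);
+ */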
+
+int h5str_dump_simple_dset(FILE *stream, hid_t dset, int binary_order)
+{
+    int      ret_value = SUCCEED;
+    hid_t               f_space = -1;                  /* file data space */
+    hsize_t             elmtno;                   /* counter  */
+    size_t              i;                        /* counter  */
+    int                 ndims;
+    int                 carry;                    /* counter carry value */
+    hsize_t             zero[8];                  /* vector of zeros */
+    hsize_t             total_size[H5S_MAX_RANK]; /* total size of dataset*/
+
+    /* Print info */
+    size_t              p_type_nbytes;            /* size of memory type */
+    hsize_t             p_nelmts;                 /* total selected elmts */
+
+    /* Stripmine info */
+    hsize_t             sm_size[H5S_MAX_RANK];    /* stripmine size */
+    hsize_t             sm_nbytes;                /* bytes per stripmine */
+    hsize_t             sm_nelmts;                /* elements per stripmine*/
+    unsigned char      *sm_buf = NULL;            /* buffer for raw data */
+    hid_t               sm_space = -1;                 /* stripmine data space */
+
+    /* Hyperslab info */
+    hsize_t             hs_offset[H5S_MAX_RANK];  /* starting offset */
+    hsize_t             hs_size[H5S_MAX_RANK];    /* size this pass */
+    hsize_t             hs_nelmts;                /* elements in request */
+
+    /* VL data special information */
+    unsigned int        vl_data = 0; /* contains VL datatypes */
+    hid_t               p_type = -1;
+    hid_t               f_type = -1;
+
+    if(dset < 0) return -1;
+    f_type = H5Dget_type(dset);
+    if (binary_order == 1)
+        p_type = h5str_get_native_type(f_type);
+    else if (binary_order == 2)
+        p_type = h5str_get_little_endian_type(f_type);
+    else if (binary_order == 3)
+        p_type = h5str_get_big_endian_type(f_type);
+    else
+        p_type = H5Tcopy(f_type);
+
+    H5Tclose(f_type);
+
+    if (p_type >= 0) {
+        if((f_space = H5Dget_space(dset)) >= 0) {
+            ndims = H5Sget_simple_extent_ndims(f_space);
+
+            if ((size_t)ndims <= (sizeof(sm_size)/sizeof(sm_size[0]))) {
+                H5Sget_simple_extent_dims(f_space, total_size, NULL);
+
+                /* calculate the number of elements we're going to print */
+                p_nelmts = 1;
+
+                if (ndims > 0) {
+                    for (i = 0; i < (size_t)ndims; i++)
+                        p_nelmts *= total_size[i];
+                } /* end if */
+
+                if (p_nelmts > 0) {
+                    /* Check if we have VL data in the dataset's datatype */
+                    if (h5str_detect_vlen(p_type) != 0)
+                        vl_data = 1;
+
+                    /*
+                     * Determine the strip mine size and allocate a buffer. The strip mine is
+                     * a hyperslab whose size is manageable.
+                     */
+                    sm_nbytes = p_type_nbytes = H5Tget_size(p_type);
+
+                    if (ndims > 0) {
+                        for (i = ndims; i > 0; --i) {
+                            hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
+                            if ( size == 0) /* datum size > H5TOOLS_BUFSIZE */
+                                size = 1;
+                            sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size));
+                            sm_nbytes *= sm_size[i - 1];
+                        }
+                    }
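+
+                    /*
+                     * Worked example (illustrative figures, taking
+                     * H5TOOLS_BUFSIZE as 1 MiB): for an 8-byte type and
+                     * a 3000 x 5000 dataset the loop above yields
+                     * sm_size = {26, 5000}: last dimension first,
+                     * 5000 * 8 = 40000 bytes, then 1048576 / 40000 = 26
+                     * rows, so sm_nbytes = 26 * 5000 * 8 = 1040000.
+                     */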
+
+                    if(sm_nbytes > 0) {
+                        sm_buf = (unsigned char *)malloc((size_t)sm_nbytes);
+
+                        sm_nelmts = sm_nbytes / p_type_nbytes;
+                        sm_space = H5Screate_simple(1, &sm_nelmts, NULL);
+
+                        /* Fail cleanly if the buffer or the stripmine
+                         * dataspace could not be created */
+                        if(sm_buf == NULL || sm_space < 0)
+                            ret_value = -1;
+
+                        /* The stripmine loop */
+                        memset(hs_offset, 0, sizeof hs_offset);
+                        memset(zero, 0, sizeof zero);
+
+                        for (elmtno = 0; ret_value >= 0 && elmtno < p_nelmts; elmtno += hs_nelmts) {
+                            /* Calculate the hyperslab size */
+                            if (ndims > 0) {
+                                for (i = 0, hs_nelmts = 1; i < (size_t)ndims; i++) {
+                                    hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i])) ? (total_size[i] - hs_offset[i]) : (sm_size[i]));
+                                    hs_nelmts *= hs_size[i];
+                                }
+
+                                H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL);
+                                H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL);
+                            }
+                            else {
+                                H5Sselect_all(f_space);
+                                H5Sselect_all(sm_space);
+                                hs_nelmts = 1;
+                            }
+
+                            /* Read the data */
+                            if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) >= 0) {
+
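+                                /* binary_order 99 selects a text dump; any other value writes raw binary */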
+                                if (binary_order == 99)
+                                    ret_value = h5tools_dump_simple_data(stream, dset, p_type, sm_buf, hs_nelmts);
+                                else
+                                    ret_value = h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts);
+
+                                /* Reclaim any VL memory, if necessary */
+                                if (vl_data)
+                                    H5Dvlen_reclaim(p_type, sm_space, H5P_DEFAULT, sm_buf);
+                            }
+                            else {
+                                ret_value = -1;
+                                break;
+                            }
+
+                            if(ret_value < 0) break;
+
+                            /* Calculate the next hyperslab offset */
+                            for (i = ndims, carry = 1; i > 0 && carry; --i) {
+                                hs_offset[i - 1] += hs_size[i - 1];
+
+                                if (hs_offset[i - 1] == total_size[i - 1])
+                                    hs_offset[i - 1] = 0;
+                                else
+                                    carry = 0;
+                            }
+                        }
+
+                        if(sm_buf)
+                            free(sm_buf);
+                    }
+                    if(sm_space >= 0 && H5Sclose(sm_space) < 0)
+                        ret_value = -1;
+                }
+            }
+            else
+                ret_value = -1;
+            if(f_space >= 0 && H5Sclose(f_space) < 0)
+                ret_value = -1;
+        } /* end if((f_space = H5Dget_space(dset)) >= 0) */
+        else
+            ret_value = -1;
+
+        if (p_type >= 0)
+            H5Tclose(p_type);
+    }
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
+
+int h5tools_dump_simple_data(FILE *stream, hid_t container, hid_t type, void *_mem, hsize_t nelmts)
+{
+    int                ret_value = 0;
+    int                line_count;
+    unsigned char     *mem  = (unsigned char*)_mem;
+    size_t             size;   /* datum size */
+    hsize_t            i;         /*element counter  */
+    h5str_t            buffer;    /*string into which to render */
+
+    if((size = H5Tget_size(type)) > 0) {
+        for (i = 0, line_count = 0; i < nelmts; i++, line_count++) {
+            void* memref = mem + i * size;
+
+            /* Render the data element into a string */
+            h5str_new(&buffer, 32 * size);
+            h5str_sprintf(&buffer, container, type, memref, 1);
+            if(i > 0) {
+                fprintf(stream, ", ");
+                if (line_count >= H5TOOLS_TEXT_BLOCK) {
+                    line_count = 0;
+                    fprintf(stream, "\n");
+                }
+            }
+            fprintf(stream, "%s", buffer.s);
+            h5str_free(&buffer);
+        } /* end for (i = 0; i < nelmts... */
+        fprintf(stream, "\n");
+    } /* end if((size = H5Tget_size(type)) > 0) */
+    else
+        ret_value = -1;
+
+    return ret_value;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/h5util.h b/source/c/hdf-java/h5util.h
new file mode 100755
index 0000000..6250961
--- /dev/null
+++ b/source/c/hdf-java/h5util.h
@@ -0,0 +1,41 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdf.ncsa.uiuc.edu/HDF5/doc/Copyright.html.  If you do not have     *
+ * access to either file, you may request a copy from hdfhelp at ncsa.uiuc.edu. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5UTIL_H__
+#define H5UTIL_H__
+
+#ifndef SUCCEED
+#define SUCCEED     0
+#endif
+
+#ifndef FAIL
+#define FAIL        (-1)
+#endif
+
+typedef struct h5str_t {
+    char    *s;
+    size_t   max;  /* the allocated size of the string */
+} h5str_t;
+
+void    h5str_new (h5str_t *str, size_t len);
+void    h5str_free (h5str_t *str);
+void    h5str_resize (h5str_t *str, size_t new_len);
+char*   h5str_append (h5str_t *str, const char* cstr);
+int     h5str_sprintf(h5str_t *str, hid_t container, hid_t tid, void *buf, int expand_data);
+void    h5str_array_free(char **strs, size_t len);
+int     h5str_dump_simple_dset(FILE *stream, hid_t dset, int binary_order);
+int     h5str_dump_region_blocks_data(h5str_t *str, hid_t region, hid_t region_obj);
+int     h5str_dump_region_points_data(h5str_t *str, hid_t region, hid_t region_obj);
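+
+/*
+ * Typical h5str_t usage (illustrative sketch only):
+ *
+ *     h5str_t str;
+ *     h5str_new(&str, 64);
+ *     h5str_append(&str, "value = ");
+ *     printf("%s\n", str.s);
+ *     h5str_free(&str);
+ */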
+
+#endif  /* H5UTIL_H__ */
diff --git a/source/c/hdf-java/h5zImp.c b/source/c/hdf-java/h5zImp.c
new file mode 100755
index 0000000..2b3d41a
--- /dev/null
+++ b/source/c/hdf-java/h5zImp.c
@@ -0,0 +1,103 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Comptational Science Alliance                                   *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Filter (H5Z) API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
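+
+/*
+ * Mapping example (illustrative): the Java call
+ *
+ *     H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE)
+ *
+ * resolves to Java_ncsa_hdf_hdf5lib_H5_H5Zfilter_1avail below, which
+ * forwards to H5Zfilter_avail() and turns a negative return code into
+ * a Java exception via h5libraryError().
+ */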
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include "h5jni.h"
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Zunregister
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Zunregister
+  (JNIEnv *env, jclass clss,
+  jint filter)
+{
+    herr_t retValue;
+
+    retValue = H5Zunregister((H5Z_filter_t)filter);
+
+    if (retValue < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Zfilter_avail
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Zfilter_1avail
+  (JNIEnv *env, jclass clss,
+  jint filter)
+{
+    herr_t retValue;
+
+    retValue = H5Zfilter_avail((H5Z_filter_t)filter);
+
+    if (retValue < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retValue;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Zget_filter_info
+ * Signature: (I)I
+ */
+
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5Zget_1filter_1info
+  (JNIEnv *env, jclass clss, jint filter)
+{
+    herr_t status;
+    unsigned int flags = 0;
+
+    status = H5Zget_filter_info ((H5Z_filter_t) filter, (unsigned *) &flags);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return flags;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf-java/nativeData.c b/source/c/hdf-java/nativeData.c
new file mode 100755
index 0000000..f49b4d0
--- /dev/null
+++ b/source/c/hdf-java/nativeData.c
@@ -0,0 +1,1254 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This module contains the implementation of all the native methods
+ *  used for number conversion.  This is represented by the Java
+ *  class HDFNativeData.
+ *
+ *  These routines convert one dimensional arrays of bytes into
+ *  one-D arrays of other types (int, float, etc) and vice versa.
+ *
+ *  These routines are called from the Java parts of the Java-C
+ *  interface.
+ *
+ *  ***Important notes:
+ *
+ *     1.  These routines are designed to be portable--they use the
+ *         C compiler to do the required native data manipulation.
+ *     2.  These routines copy the data at least once -- a serious
+ *         but unavoidable performance hit.
+ */
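+
+/*
+ * Round-trip example as seen from Java (illustrative values):
+ *
+ *     byte[] b = HDFNativeData.intToByte(0, 2, new int[]{1, 2});
+ *     int[]  v = HDFNativeData.byteToInt(b);
+ *
+ * v is {1, 2} again. The byte order is the platform's native order:
+ * the loops below simply reinterpret memory through pointer casts and
+ * unions.
+ */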
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jni.h>
+#include "hdf5.h"
+#include "h5jni.h"
+
+
+/* returns int [] */
+JNIEXPORT jintArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToInt___3B
+( JNIEnv *env,
+jclass clss,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    jbyte *barr;
+    jintArray rarray;
+    int blen;
+    jint *iarray;
+    jboolean bb;
+    char *bp;
+    jint *iap;
+    int ii;
+    int len;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToInt: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError(env,  "byteToInt: pin failed");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+    len = blen/sizeof(jint);
+    rarray = ENVPTR->NewIntArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToInt" );
+        return NULL;
+    }
+
+    iarray = ENVPTR->GetIntArrayElements(ENVPAR rarray,&bb);
+    if (iarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError(env,  "byteToInt: pin iarray failed");
+        return NULL;
+    }
+
+    bp = (char *)barr;
+    iap = iarray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jint *)bp;
+        iap++;
+        bp += sizeof(jint);
+    }
+
+    ENVPTR->ReleaseIntArrayElements(ENVPAR rarray,iarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+/* returns float [] */
+JNIEXPORT jfloatArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToFloat___3B
+( JNIEnv *env,
+jclass clss,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    jbyte *barr;
+    jfloatArray rarray;
+    int blen;
+    jfloat *farray;
+    jboolean bb;
+    char *bp;
+    jfloat *iap;
+    int ii;
+    int len;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToFloat: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError(env,  "byteToFloat: pin failed");
+        return NULL;
+    }
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+    len = blen/sizeof(jfloat);
+    rarray = ENVPTR->NewFloatArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToFloat" );
+        return NULL;
+    }
+    farray = ENVPTR->GetFloatArrayElements(ENVPAR rarray,&bb);
+    if (farray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError(env,  "byteToFloat: pin farray failed");
+        return NULL;
+    }
+
+    bp = (char *)barr;
+    iap = farray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jfloat *)bp;
+        iap++;
+        bp += sizeof(jfloat);
+    }
+
+    ENVPTR->ReleaseFloatArrayElements(ENVPAR rarray,farray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+/* returns short [] */
+JNIEXPORT jshortArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToShort___3B
+( JNIEnv *env,
+jclass clss,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    jbyte *barr;
+    jshortArray rarray;
+    int blen;
+    jshort *sarray;
+    jboolean bb;
+    char *bp;
+    jshort *iap;
+    int ii;
+    int len;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToShort: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError(env,  "byteToShort: pin failed");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+    len = blen/sizeof(jshort);
+    rarray = ENVPTR->NewShortArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToShort" );
+        return NULL;
+    }
+
+    sarray = ENVPTR->GetShortArrayElements(ENVPAR rarray,&bb);
+    if (sarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError(env,  "byteToShort: pin sarray failed");
+        return NULL;
+    }
+
+    bp = (char *)barr;
+    iap = sarray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jshort *)bp;
+        iap++;
+        bp += sizeof(jshort);
+    }
+
+    ENVPTR->ReleaseShortArrayElements(ENVPAR rarray,sarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+
+/* returns long [] */
+JNIEXPORT jlongArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToLong___3B
+( JNIEnv *env,
+jclass clss,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    jbyte *barr;
+    jlongArray rarray;
+    int blen;
+    jlong *larray;
+    jboolean bb;
+    char *bp;
+    jlong *iap;
+    int ii;
+    int len;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToLong: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError(env,  "byteToLong: pin failed");
+        return NULL;
+    }
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+    len = blen/sizeof(jlong);
+    rarray = ENVPTR->NewLongArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToLong" );
+        return NULL;
+    }
+
+    larray = ENVPTR->GetLongArrayElements(ENVPAR rarray,&bb);
+    if (larray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError(env,  "byteToLong: pin larray failed");
+        return NULL;
+    }
+
+    bp = (char *)barr;
+    iap = larray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jlong *)bp;
+        iap++;
+        bp += sizeof(jlong);
+    }
+    ENVPTR->ReleaseLongArrayElements(ENVPAR rarray,larray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+}
+
+
+/* returns double [] */
+JNIEXPORT jdoubleArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToDouble___3B
+( JNIEnv *env,
+jclass clss,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    jbyte *barr;
+    jdoubleArray rarray;
+    int blen;
+    jdouble *darray;
+    jboolean bb;
+    char *bp;
+    jdouble *iap;
+    int ii;
+    int len;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToDouble: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError(env,  "byteToDouble: pin failed");
+        return NULL;
+    }
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+    len = blen/sizeof(jdouble);
+    rarray = ENVPTR->NewDoubleArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToDouble" );
+        return NULL;
+    }
+
+    darray = ENVPTR->GetDoubleArrayElements(ENVPAR rarray,&bb);
+    if (darray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError(env,  "byteToDouble: pin darray failed");
+        return NULL;
+    }
+
+    bp = (char *)barr;
+    iap = darray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jdouble *)bp;
+        iap++;
+        bp += sizeof(jdouble);
+    }
+
+    ENVPTR->ReleaseDoubleArrayElements(ENVPAR rarray,darray,0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+}
+
+
+/* returns int [] */
+JNIEXPORT jintArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToInt__II_3B
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    char *bp;
+    jbyte *barr;
+    jintArray rarray;
+    int blen;
+    jint *iarray;
+    jint *iap;
+    int ii;
+    jboolean bb;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToInt: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError(env,  "byteToInt: pin failed");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+    if ((start < 0) || ((int)(start + (len*sizeof(jint))) > blen)) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5badArgument( env,  "byteToInt: start or len is out of bounds");
+        return NULL;
+    }
+
+    bp = (char *)barr + start;
+
+    rarray = ENVPTR->NewIntArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToInt" );
+        return NULL;
+    }
+
+    iarray = ENVPTR->GetIntArrayElements(ENVPAR rarray,&bb);
+    if (iarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError(env,  "byteToInt: pin iarray failed");
+        return NULL;
+    }
+
+    iap = iarray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jint *)bp;
+        iap++;
+        bp += sizeof(jint);
+    }
+
+    ENVPTR->ReleaseIntArrayElements(ENVPAR rarray,iarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+}
+
+/* returns short [] */
+JNIEXPORT jshortArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToShort__II_3B
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    char *bp;
+    jbyte *barr;
+    jshortArray rarray;
+    int blen;
+    jshort *iarray;
+    jshort *iap;
+    int ii;
+    jboolean bb;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToShort: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError( env,  "byteToShort: getByte failed?");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+    if ((start < 0) || ((int)(start + (len*(sizeof(jshort)))) > blen)) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5badArgument( env,  "byteToShort: start or len is out of bounds");
+        return NULL;
+    }
+
+    bp = (char *)barr + start;
+
+    rarray = ENVPTR->NewShortArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToShort" );
+        return NULL;
+    }
+
+    iarray = ENVPTR->GetShortArrayElements(ENVPAR rarray,&bb);
+    if (iarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError( env,  "byteToShort: getShort failed?");
+        return NULL;
+    }
+
+    iap = iarray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jshort *)bp;
+        iap++;
+        bp += sizeof(jshort);
+    }
+
+    ENVPTR->ReleaseShortArrayElements(ENVPAR rarray,iarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+/* returns float [] */
+JNIEXPORT jfloatArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToFloat__II_3B
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    char *bp;
+    jbyte *barr;
+    jfloatArray rarray;
+    int blen;
+    jfloat *iarray;
+    jfloat *iap;
+    int ii;
+    jboolean bb;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToFloat: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError( env,  "byteToFloat: getByte failed?");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+    if ((start < 0) || ((int)(start + (len*(sizeof(jfloat)))) > blen)) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5badArgument( env,  "byteToFloat: start or len is out of bounds");
+        return NULL;
+    }
+
+    bp = (char *)barr + start;
+
+    rarray = ENVPTR->NewFloatArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToFloat" );
+        return NULL;
+    }
+
+    iarray = ENVPTR->GetFloatArrayElements(ENVPAR rarray,&bb);
+    if (iarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError( env,  "byteToFloat: getFloat failed?");
+        return NULL;
+    }
+
+    iap = iarray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jfloat *)bp;
+        iap++;
+        bp += sizeof(jfloat);
+    }
+
+    ENVPTR->ReleaseFloatArrayElements(ENVPAR rarray,iarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+}
+
+/* returns long [] */
+JNIEXPORT jlongArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToLong__II_3B
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    char *bp;
+    jbyte *barr;
+    jlongArray rarray;
+    int blen;
+    jlong *iarray;
+    jlong *iap;
+    int ii;
+    jboolean bb;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToLong: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError( env,  "byteToLong: getByte failed?");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+    if ((start < 0) || ((int)(start + (len*(sizeof(jlong)))) > blen)) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5badArgument( env,  "byteToLong: start or len is out of bounds");
+        return NULL;
+    }
+
+    bp = (char *)barr + start;
+
+    rarray = ENVPTR->NewLongArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToLong" );
+        return NULL;
+    }
+
+    iarray = ENVPTR->GetLongArrayElements(ENVPAR rarray,&bb);
+    if (iarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError( env,  "byteToLong: getLong failed?");
+        return NULL;
+    }
+
+    iap = iarray;
+    for (ii = 0; ii < len; ii++) {
+
+        *iap = *(jlong *)bp;
+        iap++;
+        bp += sizeof(jlong);
+    }
+
+    ENVPTR->ReleaseLongArrayElements(ENVPAR rarray,iarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+/* returns double [] */
+JNIEXPORT jdoubleArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToDouble__II_3B
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jbyteArray bdata)  /* IN: array of bytes */
+{
+    char *bp;
+    jbyte *barr;
+    jdoubleArray rarray;
+    int blen;
+    jdouble *iarray;
+    jdouble *iap;
+    int ii;
+    jboolean bb;
+
+    if (bdata == NULL) {
+        h5nullArgument( env,  "byteToDouble: bdata is NULL?");
+        return NULL;
+    }
+    barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+    if (barr == NULL) {
+        h5JNIFatalError( env,  "byteToDouble: getByte failed?");
+        return NULL;
+    }
+
+    blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+    if ((start < 0) || ((int)(start + (len*(sizeof(jdouble)))) > blen)) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5badArgument( env,  "byteToDouble: start or len is out of bounds");
+        return NULL;
+    }
+
+    bp = (char *)barr + start;
+
+    rarray = ENVPTR->NewDoubleArray(ENVPAR len);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5outOfMemory( env,  "byteToDouble" );
+        return NULL;
+    }
+
+    iarray = ENVPTR->GetDoubleArrayElements(ENVPAR rarray,&bb);
+    if (iarray == NULL) {
+        ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+        h5JNIFatalError( env,  "byteToDouble: getDouble failed?");
+        return NULL;
+    }
+
+    iap = iarray;
+    for (ii = 0; ii < len; ii++) {
+        *iap = *(jdouble *)bp;
+        iap++;
+        bp += sizeof(jdouble);
+    }
+
+    ENVPTR->ReleaseDoubleArrayElements(ENVPAR rarray,iarray, 0);
+    ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+    return rarray;
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_intToByte__II_3I
+(JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jintArray idata)  /* IN: array of int */
+{
+    jint *ip;
+    jint *iarr;
+    int ilen;
+    jbyteArray rarray;
+    int blen;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ii;
+    int ij;
+    union things {
+        int ival;
+        char bytes[4];
+    } u;
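+    /* The union exposes the int's storage as raw bytes in native order;
+     * the same idiom recurs in the other numeric-to-byte conversions below. */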
+
+    if (idata == NULL) {
+        h5nullArgument( env,  "intToByte: idata is NULL?");
+        return NULL;
+    }
+
+    iarr = ENVPTR->GetIntArrayElements(ENVPAR idata,&bb);
+    if (iarr == NULL) {
+        h5JNIFatalError( env,  "intToByte: getInt failed?");
+        return NULL;
+    }
+
+    ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+    if ((start < 0) || (((start + len)) > ilen)) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5badArgument( env,  "intToByte: start or len is out of bounds");
+        return NULL;
+    }
+
+    ip = iarr + start;
+
+    blen = len * sizeof(jint);
+    rarray = ENVPTR->NewByteArray(ENVPAR blen);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5outOfMemory( env,  "intToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5JNIFatalError( env,  "intToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    for (ii = 0; ii < len; ii++) {
+        u.ival = *ip++;
+        for (ij = 0; ij < sizeof(jint); ij++) {
+            *bap = u.bytes[ij];
+            bap++;
+        }
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+    ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+    return rarray;
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_shortToByte__II_3S
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jshortArray idata)  /* IN: array of short */
+{
+    jshort *ip;
+    jshort *iarr;
+    int ilen;
+    jbyteArray rarray;
+    int blen;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ii;
+    int ij;
+    union things {
+        short ival;
+        char bytes[4];
+    } u;
+
+    if (idata == NULL) {
+        h5nullArgument( env,  "shortToByte: idata is NULL?");
+        return NULL;
+    }
+    iarr = ENVPTR->GetShortArrayElements(ENVPAR idata,&bb);
+    if (iarr == NULL) {
+        h5JNIFatalError( env,  "shortToByte: getShort failed?");
+        return NULL;
+    }
+
+    ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+    if ((start < 0) || (((start + len)) > ilen)) {
+        ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5badArgument( env,  "shortToByte: start or len is out of bounds");
+        return NULL;
+    }
+
+    ip = iarr + start;
+
+    blen = len * sizeof(jshort);
+    rarray = ENVPTR->NewByteArray(ENVPAR blen);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5outOfMemory( env,  "shortToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5JNIFatalError( env,  "shortToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    for (ii = 0; ii < len; ii++) {
+        u.ival = *ip++;
+        for (ij = 0; ij < sizeof(jshort); ij++) {
+            *bap = u.bytes[ij];
+            bap++;
+        }
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+    ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_floatToByte__II_3F
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jfloatArray idata)  /* IN: array of float */
+{
+    jfloat *ip;
+    jfloat *iarr;
+    int ilen;
+    jbyteArray rarray;
+    int blen;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ii;
+    int ij;
+    union things {
+        float ival;
+        char bytes[4];
+    } u;
+
+    if (idata == NULL) {
+        h5nullArgument( env,  "floatToByte: idata is NULL?");
+        return NULL;
+    }
+    iarr = ENVPTR->GetFloatArrayElements(ENVPAR idata,&bb);
+    if (iarr == NULL) {
+        h5JNIFatalError( env,  "floatToByte: getFloat failed?");
+        return NULL;
+    }
+
+    ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+    if ((start < 0) || (((start + len)) > ilen)) {
+        ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5badArgument( env,  "floatToByte: start or len is out of bounds");
+        return NULL;
+    }
+
+    ip = iarr + start;
+
+    blen = len * sizeof(jfloat);
+    rarray = ENVPTR->NewByteArray(ENVPAR blen);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5outOfMemory( env,  "floatToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5JNIFatalError( env,  "floatToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    for (ii = 0; ii < len; ii++) {
+        u.ival = *ip++;
+        for (ij = 0; ij < sizeof(jfloat); ij++) {
+            *bap = u.bytes[ij];
+            bap++;
+        }
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+    ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+    return rarray;
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_doubleToByte__II_3D
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jdoubleArray idata)  /* IN: array of double */
+{
+    jdouble *ip;
+    jdouble *iarr;
+    int ilen;
+    jbyteArray rarray;
+    int blen;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ii;
+    int ij;
+    union things {
+        double ival;
+        char bytes[8];
+    } u;
+
+    if (idata == NULL) {
+        h5nullArgument( env,  "doubleToByte: idata is NULL?");
+        return NULL;
+    }
+    iarr = ENVPTR->GetDoubleArrayElements(ENVPAR idata,&bb);
+    if (iarr == NULL) {
+        h5JNIFatalError( env,  "doubleToByte: getDouble failed?");
+        return NULL;
+    }
+
+    ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+    if ((start < 0) || (((start + len)) > ilen)) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5badArgument( env,  "doubleToByte: start or len is out of bounds");
+        return NULL;
+    }
+
+    ip = iarr + start;
+
+    blen = len * sizeof(jdouble);
+    rarray = ENVPTR->NewByteArray(ENVPAR blen);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5outOfMemory( env,  "doubleToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5JNIFatalError( env,  "doubleToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    for (ii = 0; ii < len; ii++) {
+        u.ival = *ip++;
+        for (ij = 0; ij < sizeof(jdouble); ij++) {
+            *bap = u.bytes[ij];
+            bap++;
+        }
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+    ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+    return rarray;
+}
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_longToByte__II_3J
+( JNIEnv *env,
+jclass clss,
+jint start,
+jint len,
+jlongArray idata)  /* IN: array of long */
+{
+    jlong *ip;
+    jlong *iarr;
+    int ilen;
+    jbyteArray rarray;
+    int blen;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ii;
+    int ij;
+    union things {
+        jlong ival;
+        char bytes[8];
+    } u;
+
+    if (idata == NULL) {
+        h5nullArgument( env,  "longToByte: idata is NULL?");
+        return NULL;
+    }
+    iarr = ENVPTR->GetLongArrayElements(ENVPAR idata,&bb);
+    if (iarr == NULL) {
+        h5JNIFatalError( env,  "longToByte: getLong failed?");
+        return NULL;
+    }
+
+    ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+    if ((start < 0) || (((start + len)) > ilen)) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5badArgument( env,  "longToByte: start or len is out of bounds?\n");
+        return NULL;
+    }
+
+    ip = iarr + start;
+
+    blen = len * sizeof(jlong);
+    rarray = ENVPTR->NewByteArray(ENVPAR blen);
+    if (rarray == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5outOfMemory( env,  "longToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+        h5JNIFatalError( env,  "longToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    for (ii = 0; ii < len; ii++) {
+        u.ival = *ip++;
+        for (ij = 0; ij < sizeof(jlong); ij++) {
+            *bap = u.bytes[ij];
+            bap++;
+        }
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+    return rarray;
+
+}
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_intToByte__I
+( JNIEnv *env,
+jclass clss,
+jint idata)  /* IN: int */
+{
+    jbyteArray rarray;
+    jbyte *barray;
+    jbyte *bap;
+    int ij;
+    jboolean bb;
+    union things {
+        int ival;
+        char bytes[sizeof(int)];
+    } u;
+
+    rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jint));
+    if (rarray == NULL) {
+        h5outOfMemory( env,  "intToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        h5JNIFatalError( env,  "intToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    u.ival = idata;
+    for (ij = 0; ij < sizeof(jint); ij++) {
+        *bap = u.bytes[ij];
+        bap++;
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+    return rarray;
+
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_floatToByte__F
+( JNIEnv *env,
+jclass clss,
+jfloat idata)  /* IN: float */
+{
+    jbyteArray rarray;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ij;
+    union things {
+        float ival;
+        char bytes[sizeof(float)];
+    } u;
+
+    rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jfloat));
+    if (rarray == NULL) {
+        h5outOfMemory( env,  "floatToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        h5JNIFatalError( env,  "floatToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    u.ival = idata;
+    for (ij = 0; ij < sizeof(jfloat); ij++) {
+        *bap = u.bytes[ij];
+        bap++;
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+    return rarray;
+
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_shortToByte__S
+( JNIEnv *env,
+jclass clss,
+jshort idata)  /* IN: short */
+{
+    jbyteArray rarray;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ij;
+    union things {
+        short ival;
+        char bytes[sizeof(short)];
+    } u;
+
+    rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jshort));
+    if (rarray == NULL) {
+        h5outOfMemory( env,  "shortToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        h5JNIFatalError( env,  "shortToByte: getShort failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    u.ival = idata;
+    for (ij = 0; ij < sizeof(jshort); ij++) {
+        *bap = u.bytes[ij];
+        bap++;
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+    return rarray;
+}
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_doubleToByte__D
+( JNIEnv *env,
+jclass clss,
+jdouble idata)  /* IN: double */
+{
+    jbyteArray rarray;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ij;
+    union things {
+        double ival;
+        char bytes[sizeof(double)];
+    } u;
+
+    rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jdouble));
+    if (rarray == NULL) {
+        h5outOfMemory( env,  "doubleToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        h5JNIFatalError( env,  "doubleToByte: getDouble failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    u.ival = idata;
+    for (ij = 0; ij < sizeof(jdouble); ij++) {
+        *bap = u.bytes[ij];
+        bap++;
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+    return rarray;
+}
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_longToByte__J
+( JNIEnv *env,
+jclass clss,
+jlong idata)  /* IN: long */
+{
+    jbyteArray rarray;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ij;
+    union things {
+        jlong ival;
+        char bytes[sizeof(jlong)];
+    } u;
+
+    rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jlong));
+    if (rarray == NULL) {
+        h5outOfMemory( env,  "longToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        h5JNIFatalError( env,  "longToByte: getLong failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    u.ival = idata;
+    for (ij = 0; ij < sizeof(jlong); ij++) {
+        *bap = u.bytes[ij];
+        bap++;
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+    return rarray;
+}
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_ncsa_hdf_hdf5lib_HDFNativeData_byteToByte__B
+( JNIEnv *env,
+jclass clss,
+jbyte idata)  /* IN: byte */
+{
+    jbyteArray rarray;
+    jbyte *barray;
+    jbyte *bap;
+    jboolean bb;
+    int ij;
+    union things {
+        jbyte ival;
+        char bytes[sizeof(jbyte)];
+    } u;
+
+    rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jbyte));
+    if (rarray == NULL) {
+        h5outOfMemory( env,  "byteToByte" );
+        return NULL;
+    }
+
+    barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+    if (barray == NULL) {
+        h5JNIFatalError( env,  "byteToByte: getByte failed?");
+        return NULL;
+    }
+
+    bap = barray;
+    u.ival = idata;
+    for (ij = 0; ij < sizeof(jbyte); ij++) {
+        *bap = u.bytes[ij];
+        bap++;
+    }
+
+    ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+    return rarray;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/hdf5_win_compile.diff b/source/c/hdf5_win_compile.diff
new file mode 100644
index 0000000..b71b7b3
--- /dev/null
+++ b/source/c/hdf5_win_compile.diff
@@ -0,0 +1,20 @@
+diff -ruN hdf5-1.8.13.orig/compile_windows_i386.bat hdf5-1.8.13/compile_windows_i386.bat
+--- compile_windows_i386.bat	1970-01-01 01:00:00.000000000 +0100
++++ compile_windows_i386.bat	2013-06-22 00:04:09.000000000 +0200
+@@ -0,0 +1,6 @@
++@echo off
++rd /S /Q build
++mkdir build
++cd build
++cmake -G "Visual Studio 9 2008" -DBUILD_SHARED_LIBS:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ" -DZLIB_TGZ_NAME:STRING="zlib-1.2.8.tar.gz" -DTGZPATH:STRING="u:/winbuild" ..
++cmake --build . --config Release
+diff -ruN hdf5-1.8.13.orig/compile_windows_x64.bat hdf5-1.8.13/compile_windows_x64.bat
+--- compile_windows_x64.bat	1970-01-01 01:00:00.000000000 +0100
++++ compile_windows_x64.bat	2013-06-22 00:05:05.000000000 +0200
+@@ -0,0 +1,6 @@
++@echo off
++rd /S /Q build
++mkdir build
++cd build
++cmake -G "Visual Studio 9 2008 Win64" -DBUILD_SHARED_LIBS:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ" -DZLIB_TGZ_NAME:STRING="zlib-1.2.8.tar.gz" -DTGZPATH:STRING="u:/winbuild" ..
++cmake --build . --config Release
diff --git a/source/c/hdf5_win_mt.diff b/source/c/hdf5_win_mt.diff
new file mode 100644
index 0000000..0fa6ba8
--- /dev/null
+++ b/source/c/hdf5_win_mt.diff
@@ -0,0 +1,1659 @@
+Index: test/CMakeLists.txt
+===================================================================
+--- test/CMakeLists.txt	(revision 23770)
++++ test/CMakeLists.txt	(revision 23771)
+@@ -27,6 +27,7 @@
+ )
+ 
+ ADD_LIBRARY (${HDF5_TEST_LIB_TARGET} ${LIB_TYPE} ${TEST_LIB_SRCS} ${TEST_LIB_HEADERS})
++TARGET_C_PROPERTIES (${HDF5_TEST_LIB_TARGET} " " " ")
+ IF (MSVC)
+   TARGET_LINK_LIBRARIES (${HDF5_TEST_LIB_TARGET} "ws2_32.lib")
+ ENDIF (MSVC)
+@@ -64,6 +65,7 @@
+     INCLUDE_DIRECTORIES (${HDF5_SRC_DIR})
+ 
+     ADD_LIBRARY (${HDF5_TEST_PLUGIN_LIB_TARGET} ${LIB_TYPE} ${HDF5_TEST_SOURCE_DIR}/${test_lib}.c)
++    TARGET_C_PROPERTIES (${HDF5_TEST_PLUGIN_LIB_TARGET} " " " ")
+     TARGET_LINK_LIBRARIES (${HDF5_TEST_PLUGIN_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+     H5_SET_LIB_OPTIONS (
+         ${HDF5_TEST_PLUGIN_LIB_TARGET} ${HDF5_TEST_PLUGIN_LIB_NAME}
+@@ -94,6 +96,7 @@
+     INCLUDE_DIRECTORIES (${HDF5_SRC_DIR})
+ 
+     ADD_LIBRARY (${HDF5_TEST_PLUGIN_LIB_TARGET} ${LIB_TYPE} ${HDF5_TEST_SOURCE_DIR}/${test_lib}.c)
++    TARGET_C_PROPERTIES (${HDF5_TEST_PLUGIN_LIB_TARGET} " " " ")
+     TARGET_LINK_LIBRARIES (${HDF5_TEST_PLUGIN_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+     H5_SET_LIB_OPTIONS (
+         ${HDF5_TEST_PLUGIN_LIB_TARGET} ${HDF5_TEST_PLUGIN_LIB_NAME}
+@@ -237,6 +240,7 @@
+ #-- Adding test for testhdf5
+ ADD_EXECUTABLE (testhdf5 ${testhdf5_SRCS})
+ TARGET_NAMING (testhdf5 ${LIB_TYPE})
++TARGET_C_PROPERTIES (testhdf5 " " " ")
+ TARGET_LINK_LIBRARIES (testhdf5 ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (testhdf5 PROPERTIES FOLDER test)
+ 
+@@ -288,6 +292,7 @@
+ MACRO (ADD_H5_TEST file)
+   ADD_EXECUTABLE (${file} ${HDF5_TEST_SOURCE_DIR}/${file}.c)
+   TARGET_NAMING (${file} ${LIB_TYPE})
++  TARGET_C_PROPERTIES (${file} " " " ")
+   TARGET_LINK_LIBRARIES (${file} ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET})
+   SET_TARGET_PROPERTIES (${file} PROPERTIES FOLDER test)
+ 
+@@ -428,6 +433,7 @@
+ #-- Adding test for cache
+ ADD_EXECUTABLE (cache ${HDF5_TEST_SOURCE_DIR}/cache.c ${HDF5_TEST_SOURCE_DIR}/cache_common.c)
+ TARGET_NAMING (cache ${LIB_TYPE})
++TARGET_C_PROPERTIES (cache " " " ")
+ TARGET_LINK_LIBRARIES (cache ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+ SET_TARGET_PROPERTIES (cache PROPERTIES FOLDER test)
+ ADD_TEST (
+@@ -442,6 +448,7 @@
+ #-- Adding test for cache_api
+ ADD_EXECUTABLE (cache_api ${HDF5_TEST_SOURCE_DIR}/cache_api.c ${HDF5_TEST_SOURCE_DIR}/cache_common.c)
+ TARGET_NAMING (cache_api ${LIB_TYPE})
++TARGET_C_PROPERTIES (cache_api " " " ")
+ TARGET_LINK_LIBRARIES (cache_api ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+ SET_TARGET_PROPERTIES (cache_api PROPERTIES FOLDER test)
+ 
+@@ -463,6 +470,7 @@
+     ${HDF5_TEST_SOURCE_DIR}/ttsafe_acreate.c
+ )
+ TARGET_NAMING (ttsafe ${LIB_TYPE})
++TARGET_C_PROPERTIES (ttsafe " " " ")
+ TARGET_LINK_LIBRARIES (ttsafe ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+ SET_TARGET_PROPERTIES (ttsafe PROPERTIES FOLDER test)
+ 
+@@ -482,6 +490,7 @@
+ IF (HDF5_ENABLE_DEPRECATED_SYMBOLS)
+   ADD_EXECUTABLE (err_compat ${HDF5_TEST_SOURCE_DIR}/err_compat.c)
+   TARGET_NAMING (err_compat ${LIB_TYPE})
++  TARGET_C_PROPERTIES (err_compat " " " ")
+   TARGET_LINK_LIBRARIES (err_compat ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+   SET_TARGET_PROPERTIES (err_compat PROPERTIES FOLDER test)
+ 
+@@ -508,6 +517,7 @@
+ #-- Adding test for error_test
+ ADD_EXECUTABLE (error_test ${HDF5_TEST_SOURCE_DIR}/error_test.c)
+ TARGET_NAMING (error_test ${LIB_TYPE})
++TARGET_C_PROPERTIES (error_test " " " ")
+ TARGET_LINK_LIBRARIES (error_test ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+ SET_TARGET_PROPERTIES (error_test PROPERTIES FOLDER test)
+ 
+@@ -534,6 +544,7 @@
+ #-- Adding test for links_env
+ ADD_EXECUTABLE (links_env ${HDF5_TEST_SOURCE_DIR}/links_env.c)
+ TARGET_NAMING (links_env ${LIB_TYPE})
++TARGET_C_PROPERTIES (links_env " " " ")
+ TARGET_LINK_LIBRARIES (links_env ${HDF5_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+ SET_TARGET_PROPERTIES (links_env PROPERTIES FOLDER test)
+ 
+@@ -587,6 +598,7 @@
+ 
+   ADD_EXECUTABLE (plugin ${HDF5_TEST_SOURCE_DIR}/plugin.c)
+   TARGET_NAMING (plugin ${LIB_TYPE})
++  TARGET_C_PROPERTIES (plugin " " " ")
+   TARGET_LINK_LIBRARIES (plugin ${HDF5_TEST_PLUGIN_LIB_TARGET})
+   SET_TARGET_PROPERTIES (plugin PROPERTIES FOLDER test)
+ 
+@@ -720,6 +732,7 @@
+   MACRO (ADD_H5_GENERATOR genfile)
+     ADD_EXECUTABLE (${genfile} ${HDF5_TEST_SOURCE_DIR}/${genfile}.c)
+     TARGET_NAMING (${genfile} ${LIB_TYPE})
++    TARGET_C_PROPERTIES (${genfile} " " " ")
+     TARGET_LINK_LIBRARIES (${genfile} ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET})
+     SET_TARGET_PROPERTIES (${genfile} PROPERTIES FOLDER generator/test)
+   ENDMACRO (ADD_H5_GENERATOR genfile)
+Index: configure
+===================================================================
+--- configure	(revision 23770)
++++ configure	(revision 23771)
+@@ -31025,8 +31025,8 @@
+ ## Enable strict file format checks
+ ##
+ 
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Whether to perform strict file format checks" >&5
+-$as_echo_n "checking Whether to perform strict file format checks... " >&6; };
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to perform strict file format checks" >&5
++$as_echo_n "checking whether to perform strict file format checks... " >&6; };
+ # Check whether --enable-strict-format-checks was given.
+ if test "${enable_strict_format_checks+set}" = set; then :
+   enableval=$enable_strict_format_checks; STRICT_CHECKS=$enableval
+@@ -31062,8 +31062,8 @@
+ ## ----------------------------------------------------------------------
+ ## Enable embedded library information
+ ##
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Whether to have library information embedded in the executables" >&5
+-$as_echo_n "checking Whether to have library information embedded in the executables... " >&6; }
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to have library information embedded in the executables" >&5
++$as_echo_n "checking whether to have library information embedded in the executables... " >&6; }
+ # Check whether --enable-embedded-libinfo was given.
+ if test "${enable_embedded_libinfo+set}" = set; then :
+   enableval=$enable_embedded_libinfo; enable_embedded_libinfo=$enableval
+Index: testpar/CMakeLists.txt
+===================================================================
+--- testpar/CMakeLists.txt	(revision 23770)
++++ testpar/CMakeLists.txt	(revision 23771)
+@@ -28,6 +28,7 @@
+ #-- Adding test for testhdf5
+ ADD_EXECUTABLE (testphdf5 ${testphdf5_SRCS})
+ TARGET_NAMING (testphdf5 ${LIB_TYPE})
++TARGET_C_PROPERTIES (testphdf5 " " " ")
+ TARGET_LINK_LIBRARIES (testphdf5 ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_LIBS})
+ SET_TARGET_PROPERTIES (testphdf5 PROPERTIES FOLDER test/par)
+ 
+@@ -36,6 +37,7 @@
+ MACRO (ADD_H5P_TEST file)
+   ADD_EXECUTABLE (${file} ${HDF5_TEST_PAR_SOURCE_DIR}/${file}.c)
+   TARGET_NAMING (${file} ${LIB_TYPE})
++  TARGET_C_PROPERTIES (${file} " " " ")
+   TARGET_LINK_LIBRARIES (${file} ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_LIBS})
+   SET_TARGET_PROPERTIES (${file} PROPERTIES FOLDER test/par)
+ 
+Index: src/CMakeLists.txt
+===================================================================
+--- src/CMakeLists.txt	(revision 23770)
++++ src/CMakeLists.txt	(revision 23771)
+@@ -614,6 +614,7 @@
+ # specific type checks inside
+ #-----------------------------------------------------------------------------
+ ADD_EXECUTABLE (H5detect ${HDF5_SRC_DIR}/H5detect.c)
++TARGET_C_PROPERTIES (H5detect " " " ")
+ IF (MSVC)
+   TARGET_LINK_LIBRARIES (H5detect "ws2_32.lib")
+ ENDIF (MSVC)
+@@ -627,6 +628,7 @@
+ )
+ 
+ ADD_EXECUTABLE (H5make_libsettings ${HDF5_SRC_DIR}/H5make_libsettings.c)
++TARGET_C_PROPERTIES (H5make_libsettings " " " ")
+ IF (MSVC)
+   TARGET_LINK_LIBRARIES (H5make_libsettings "ws2_32.lib")
+ ENDIF (MSVC)
+@@ -676,6 +678,7 @@
+ SET_SOURCE_FILES_PROPERTIES (${HDF5_BINARY_DIR}/H5overflow.h GENERATED)
+ 
+ ADD_LIBRARY (${HDF5_LIB_TARGET} ${LIB_TYPE} ${common_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS})
++TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_LIB_TARGET} ${LINK_LIBS})
+ IF (NOT WIN32)
+   TARGET_LINK_LIBRARIES (${HDF5_LIB_TARGET} dl)
+Index: release_docs/USING_HDF5_CMake.txt
+===================================================================
+--- release_docs/USING_HDF5_CMake.txt	(revision 23770)
++++ release_docs/USING_HDF5_CMake.txt	(revision 23771)
+@@ -197,6 +197,7 @@
+ SET (example hdf_example)
+ 
+ ADD_EXECUTABLE (${example} ${PROJECT_SOURCE_DIR}/${example}.c)
++TARGET_C_PROPERTIES (${example} " " " ")
+ TARGET_LINK_LIBRARIES (${example} ${LINK_LIBS})
+ 
+ ENABLE_TESTING ()
+Index: tools/h5dump/testh5dump.sh.in
+===================================================================
+--- tools/h5dump/testh5dump.sh.in	(revision 23770)
++++ tools/h5dump/testh5dump.sh.in	(revision 23771)
+@@ -59,7 +59,9 @@
+ SRC_H5STAT_TESTFILES="$SRC_TOOLS/h5stat/testfiles"
+ SRC_H5IMPORT_TESTFILES="$SRC_TOOLS/h5import/testfiles"
+ 
++TEST_P_DIR=./testfiles
+ TESTDIR=./testfiles/std
++test -d $TEST_P_DIR || mkdir -p $TEST_P_DIR
+ test -d $TESTDIR || mkdir -p $TESTDIR
+ 
+ ######################################################################
+@@ -272,6 +274,8 @@
+ $SRC_H5DUMP_TESTFILES/tnullspace.ddl
+ $SRC_H5DUMP_TESTFILES/trawdatafile.ddl
+ $SRC_H5DUMP_TESTFILES/trawdatafile.exp
++$SRC_H5DUMP_TESTFILES/trawssetfile.ddl
++$SRC_H5DUMP_TESTFILES/trawssetfile.exp
+ $SRC_H5DUMP_TESTFILES/zerodim.ddl
+ $SRC_H5DUMP_TESTFILES/tordergr1.ddl
+ $SRC_H5DUMP_TESTFILES/tordergr2.ddl
+@@ -871,6 +875,7 @@
+ TOOLTEST2 trawdatafile.exp --enable-error-stack -y -o trawdatafile.txt packedbits.h5
+ TOOLTEST2 tnoddlfile.exp --enable-error-stack -O -y -o tnoddlfile.txt packedbits.h5
+ TOOLTEST2A twithddlfile.exp twithddl.exp --enable-error-stack --ddl=twithddl.txt -y -o twithddlfile.txt packedbits.h5
++TOOLTEST2 trawssetfile.exp --enable-error-stack -d "/dset1[1,1;;;]" -y -o trawssetfile.txt tdset.h5
+ 
+ # test for maximum display datasets
+ TOOLTEST twidedisplay.ddl --enable-error-stack -w0 packedbits.h5
+Index: tools/h5dump/testh5dumppbits.sh.in
+===================================================================
+--- tools/h5dump/testh5dumppbits.sh.in	(revision 23770)
++++ tools/h5dump/testh5dumppbits.sh.in	(revision 23771)
+@@ -64,7 +64,9 @@
+ SRC_H5STAT_TESTFILES="$SRC_TOOLS/h5stat/testfiles"
+ SRC_H5IMPORT_TESTFILES="$SRC_TOOLS/h5import/testfiles"
+ 
++TEST_P_DIR=./testfiles
+ TESTDIR=./testfiles/pbits
++test -d $TEST_P_DIR || mkdir -p $TEST_P_DIR
+ test -d $TESTDIR || mkdir -p $TESTDIR
+ 
+ ######################################################################
+Index: tools/h5dump/testh5dumpxml.sh.in
+===================================================================
+--- tools/h5dump/testh5dumpxml.sh.in	(revision 23770)
++++ tools/h5dump/testh5dumpxml.sh.in	(revision 23771)
+@@ -50,7 +50,9 @@
+ SRC_H5STAT_TESTFILES="$SRC_TOOLS/h5stat/testfiles"
+ SRC_H5IMPORT_TESTFILES="$SRC_TOOLS/h5import/testfiles"
+ 
++TEST_P_DIR=./testfiles
+ TESTDIR=./testfiles/xml
++test -d $TEST_P_DIR || mkdir -p $TEST_P_DIR
+ test -d $TESTDIR || mkdir -p $TESTDIR
+ 
+ ######################################################################
+Index: tools/h5dump/CMakeLists.txt
+===================================================================
+--- tools/h5dump/CMakeLists.txt	(revision 23770)
++++ tools/h5dump/CMakeLists.txt	(revision 23771)
+@@ -15,6 +15,7 @@
+     ${HDF5_TOOLS_H5DUMP_SOURCE_DIR}/h5dump_xml.c
+ )
+ TARGET_NAMING (h5dump ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5dump " " " ")
+ TARGET_LINK_LIBRARIES (h5dump  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5dump PROPERTIES FOLDER tools)
+ 
+@@ -33,6 +34,7 @@
+   IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (h5dumpgentest ${HDF5_TOOLS_H5DUMP_SOURCE_DIR}/h5dumpgentest.c)
+     TARGET_NAMING (h5dumpgentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5dumpgentest " " " ")
+     TARGET_LINK_LIBRARIES (h5dumpgentest ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5dumpgentest PROPERTIES FOLDER generator/tools)
+     
+@@ -147,6 +149,7 @@
+       ${HDF5_TOOLS_SRC_DIR}/testfiles/tno-subset.ddl
+       ${HDF5_TOOLS_SRC_DIR}/testfiles/tnullspace.ddl
+       ${HDF5_TOOLS_SRC_DIR}/testfiles/trawdatafile.ddl
++      ${HDF5_TOOLS_SRC_DIR}/testfiles/trawssetfile.ddl
+       ${HDF5_TOOLS_SRC_DIR}/testfiles/zerodim.ddl
+       ${HDF5_TOOLS_SRC_DIR}/testfiles/tordergr1.ddl
+       ${HDF5_TOOLS_SRC_DIR}/testfiles/tordergr2.ddl
+@@ -198,6 +201,7 @@
+       tall-6.exp
+       tnoddlfile.exp
+       trawdatafile.exp
++      trawssetfile.exp
+       tstr2bin2.exp
+       tstr2bin6.exp
+       twithddl.exp
+@@ -1250,6 +1254,9 @@
+           trawdatafile.out
+           trawdatafile.out.err
+           trawdatafile.txt
++          trawssetfile.out
++          trawssetfile.out.err
++          trawssetfile.txt
+           tno-subset.out
+           tno-subset.out.err
+           tnullspace.out
+@@ -1365,7 +1372,8 @@
+   ADD_H5_TEST (tnoattrdata 0 --enable-error-stack -A -o tattr.h5)
+   ADD_H5_TEST_EXPORT (trawdatafile packedbits.h5 0 --enable-error-stack -y -o)
+   ADD_H5_TEST_EXPORT (tnoddlfile packedbits.h5 0 --enable-error-stack -O -y -o)
+-
++  ADD_H5_TEST_EXPORT (trawssetfile tdset.h5 0 --enable-error-stack -d "/dset1[1,1;;;]" -y -o)
++  
+   ADD_H5_TEST_EXPORT_DDL (twithddlfile packedbits.h5 0 twithddl --enable-error-stack --ddl=twithddl.txt -y -o)
+   
+   # test for maximum display datasets
+Index: tools/h5repack/CMakeLists.txt
+===================================================================
+--- tools/h5repack/CMakeLists.txt	(revision 23770)
++++ tools/h5repack/CMakeLists.txt	(revision 23771)
+@@ -22,6 +22,7 @@
+ 
+ ADD_EXECUTABLE (h5repack ${REPACK_COMMON_SRCS} ${HDF5_TOOLS_H5REPACK_SOURCE_DIR}/h5repack_main.c)
+ TARGET_NAMING (h5repack ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5repack " " " ")
+ TARGET_LINK_LIBRARIES (h5repack  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5repack PROPERTIES FOLDER tools)
+ 
+@@ -39,11 +40,13 @@
+   # --------------------------------------------------------------------
+   ADD_EXECUTABLE (testh5repack_detect_szip ${HDF5_TOOLS_H5REPACK_SOURCE_DIR}/testh5repack_detect_szip.c)
+   TARGET_NAMING (testh5repack_detect_szip ${LIB_TYPE})
++  TARGET_C_PROPERTIES (testh5repack_detect_szip " " " ")
+   TARGET_LINK_LIBRARIES (testh5repack_detect_szip ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+   SET_TARGET_PROPERTIES (testh5repack_detect_szip PROPERTIES FOLDER tools)
+ 
+   ADD_EXECUTABLE (h5repacktest ${REPACK_COMMON_SRCS} ${HDF5_TOOLS_H5REPACK_SOURCE_DIR}/h5repacktst.c)
+   TARGET_NAMING (h5repacktest ${LIB_TYPE})
++  TARGET_C_PROPERTIES (h5repacktest " " " ")
+   TARGET_LINK_LIBRARIES (h5repacktest  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+   SET_TARGET_PROPERTIES (h5repacktest PROPERTIES FOLDER tools)
+ 
+Index: tools/testfiles/trawssetfile.exp
+===================================================================
+--- tools/testfiles/trawssetfile.exp	(revision 0)
++++ tools/testfiles/trawssetfile.exp	(revision 23771)
+@@ -0,0 +1,2 @@
++
++         2
+\ No newline at end of file
+Index: tools/testfiles/trawssetfile.ddl
+===================================================================
+--- tools/testfiles/trawssetfile.ddl	(revision 0)
++++ tools/testfiles/trawssetfile.ddl	(revision 23771)
+@@ -0,0 +1,14 @@
++HDF5 "tdset.h5" {
++DATASET "/dset1" {
++   DATATYPE  H5T_STD_I32BE
++   DATASPACE  SIMPLE { ( 10, 20 ) / ( 10, 20 ) }
++   SUBSET {
++      START ( 1, 1 );
++      STRIDE ( 1, 1 );
++      COUNT ( 1, 1 );
++      BLOCK ( 1, 1 );
++      DATA {
++      }
++   }
++}
++}
+Index: tools/h5jam/CMakeLists.txt
+===================================================================
+--- tools/h5jam/CMakeLists.txt	(revision 23770)
++++ tools/h5jam/CMakeLists.txt	(revision 23771)
+@@ -11,21 +11,25 @@
+ # --------------------------------------------------------------------
+ ADD_EXECUTABLE (h5jam ${HDF5_TOOLS_H5JAM_SOURCE_DIR}/h5jam.c)
+ TARGET_NAMING (h5jam ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5jam " " " ")
+ TARGET_LINK_LIBRARIES (h5jam  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5jam PROPERTIES FOLDER tools)
+ 
+ ADD_EXECUTABLE (getub ${HDF5_TOOLS_H5JAM_SOURCE_DIR}/getub.c)
+ TARGET_NAMING (getub ${LIB_TYPE})
++TARGET_C_PROPERTIES (getub " " " ")
+ TARGET_LINK_LIBRARIES (getub  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (getub PROPERTIES FOLDER tools)
+ 
+ ADD_EXECUTABLE (tellub ${HDF5_TOOLS_H5JAM_SOURCE_DIR}/tellub.c)
+ TARGET_NAMING (tellub ${LIB_TYPE})
++TARGET_C_PROPERTIES (tellub " " " ")
+ TARGET_LINK_LIBRARIES (tellub  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (tellub PROPERTIES FOLDER tools)
+ 
+ ADD_EXECUTABLE (h5unjam ${HDF5_TOOLS_H5JAM_SOURCE_DIR}/h5unjam.c)
+ TARGET_NAMING (h5unjam ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5unjam " " " ")
+ TARGET_LINK_LIBRARIES (h5unjam  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5unjam PROPERTIES FOLDER tools)
+ 
+@@ -49,6 +53,7 @@
+   IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (h5jamgentest ${HDF5_TOOLS_H5JAM_SOURCE_DIR}/h5jamgentest.c)
+     TARGET_NAMING (h5jamgentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5jamgentest " " " ")
+     TARGET_LINK_LIBRARIES (h5jamgentest ${HDF5_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5jamgentest PROPERTIES FOLDER generator/tools)
+     
+Index: tools/h5diff/CMakeLists.txt
+===================================================================
+--- tools/h5diff/CMakeLists.txt	(revision 23770)
++++ tools/h5diff/CMakeLists.txt	(revision 23771)
+@@ -14,6 +14,7 @@
+     ${HDF5_TOOLS_H5DIFF_SOURCE_DIR}/h5diff_main.c
+ )
+ TARGET_NAMING (h5diff ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5diff " " " ")
+ TARGET_LINK_LIBRARIES (h5diff  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5diff PROPERTIES FOLDER tools)
+ 
+@@ -25,6 +26,7 @@
+       ${HDF5_TOOLS_H5DIFF_SOURCE_DIR}/ph5diff_main.c
+   )
+   TARGET_NAMING (ph5diff ${LIB_TYPE})
++  TARGET_C_PROPERTIES (ph5diff " " " ")
+   TARGET_LINK_LIBRARIES (ph5diff  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+   SET_TARGET_PROPERTIES (ph5diff PROPERTIES FOLDER tools)
+ ENDIF (H5_HAVE_PARALLEL)
+@@ -42,6 +44,7 @@
+   IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (h5diffgentest ${HDF5_TOOLS_H5DIFF_SOURCE_DIR}/h5diffgentest.c)
+     TARGET_NAMING (h5diffgentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5diffgentest " " " ")
+     TARGET_LINK_LIBRARIES (h5diffgentest ${HDF5_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5diffgentest PROPERTIES FOLDER generator/tools)
+     
+Index: tools/lib/h5tools_dump.c
+===================================================================
+--- tools/lib/h5tools_dump.c	(revision 23770)
++++ tools/lib/h5tools_dump.c	(revision 23771)
+@@ -314,13 +314,13 @@
+                     if (region_space >= 0) {
+                         if (h5tools_is_zero(memref, H5Tget_size(type))) {
+                             ctx->need_prefix = TRUE;
+-                            h5tools_simple_prefix(stream, info, ctx, curr_pos, 0);
++                            h5tools_simple_prefix(rawoutstream, info, ctx, curr_pos, 0);
+ 
+                             /* Render the region element begin */
+                             h5tools_str_reset(&buffer);
+                             h5tools_str_append(&buffer, "NULL");
+ 
+-                            dimension_break = h5tools_render_element(stream, info,
++                            dimension_break = h5tools_render_element(rawoutstream, info,
+                                        ctx, &buffer, &curr_pos, ncols, i, elmt_counter);
+                         }
+                         else {
+@@ -328,25 +328,25 @@
+                                 HERROR(H5E_tools_g, H5E_tools_min_id_g, "H5Rget_name failed");
+ 
+                             ctx->need_prefix = TRUE;
+-                            h5tools_simple_prefix(stream, info, ctx, curr_pos+i, 0);
++                            h5tools_simple_prefix(rawoutstream, info, ctx, curr_pos+i, 0);
+ 
+                             /* Render the region element begin */
+                             h5tools_str_reset(&buffer);
+                             h5tools_str_append(&buffer, info->dset_format, ref_name);
+ 
+-                            dimension_break = h5tools_render_element(stream, info,
++                            dimension_break = h5tools_render_element(rawoutstream, info,
+                                        ctx, &buffer, &curr_pos, ncols, i, elmt_counter);
+ 
+                             region_type = H5Sget_select_type(region_space);
+                             if(region_type == H5S_SEL_POINTS)
+                                 /* Print point information */
+                                 dimension_break = h5tools_dump_region_data_points(
+-                                                       region_space, region_id, stream, info, ctx,
++                                                       region_space, region_id, rawoutstream, info, ctx,
+                                                        &buffer, &curr_pos, ncols, i, elmt_counter);
+                             else if(region_type == H5S_SEL_HYPERSLABS)
+                                 /* Print block information */
+                                 dimension_break = h5tools_dump_region_data_blocks(
+-                                                       region_space, region_id, stream, info, ctx,
++                                                       region_space, region_id, rawoutstream, info, ctx,
+                                                        &buffer, &curr_pos, ncols, i, elmt_counter);
+                             else
+                                 HERROR(H5E_tools_g, H5E_tools_min_id_g, "invalid region type");
+@@ -1790,7 +1790,7 @@
+         if(!sset)
+             status = h5tools_dump_simple_dset(rawdatastream, info, ctx, dset, p_type);
+         else
+-            status = h5tools_dump_simple_subset(stream, info, ctx, dset, p_type, sset);
++            status = h5tools_dump_simple_subset(rawdatastream, info, ctx, dset, p_type, sset);
+     }
+     else
+         /* space is H5S_NULL */
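
The h5tools_dump.c hunk above corrects a wrong-stream bug: the
region-reference and subset renderers wrote to the generic stream argument
instead of rawoutstream/rawdatastream, so raw output requested with
-y -o <file> could land on the wrong handle. A small C sketch of the
stream-routing pattern involved follows; the names are illustrative, not the
tool's actual API.

    #include <stdio.h>

    /* Route data either to stdout or to the file named on the command
     * line, mirroring how h5dump separates its DDL and raw-data streams. */
    static void render(FILE *rawout, const char *data)
    {
        fprintf(rawout, "%s\n", data);  /* must use the raw stream, not stdout */
    }

    int main(int argc, char *argv[])
    {
        FILE *rawout = stdout;

        if (argc > 1) {                 /* e.g. ./demo subset.txt */
            rawout = fopen(argv[1], "w");
            if (rawout == NULL)
                return 1;
        }
        render(rawout, "         2");   /* the subset value from trawssetfile.exp */
        if (rawout != stdout)
            fclose(rawout);
        return 0;
    }
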
+Index: tools/lib/CMakeLists.txt
+===================================================================
+--- tools/lib/CMakeLists.txt	(revision 23770)
++++ tools/lib/CMakeLists.txt	(revision 23771)
+@@ -37,8 +37,9 @@
+ )
+ 
+ ADD_LIBRARY (${HDF5_TOOLS_LIB_TARGET} ${LIB_TYPE} ${H5_TOOLS_LIB_SRCS} ${H5_TOOLS_LIB_HDRS})
++TARGET_C_PROPERTIES (${HDF5_TOOLS_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+-SET_GLOBAL_VARIABLE( HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_TOOLS_LIB_TARGET}")
++SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_TOOLS_LIB_TARGET}")
+ H5_SET_LIB_OPTIONS (
+     ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TOOLS_LIB_NAME}
+     ${LIB_TYPE}
+Index: tools/h5copy/CMakeLists.txt
+===================================================================
+--- tools/h5copy/CMakeLists.txt	(revision 23770)
++++ tools/h5copy/CMakeLists.txt	(revision 23771)
+@@ -11,6 +11,7 @@
+ # --------------------------------------------------------------------
+ ADD_EXECUTABLE (h5copy ${HDF5_TOOLS_H5COPY_SOURCE_DIR}/h5copy.c)
+ TARGET_NAMING (h5copy ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5copy " " " ")
+ TARGET_LINK_LIBRARIES (h5copy  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5copy PROPERTIES FOLDER tools)
+ 
+@@ -26,6 +27,7 @@
+   IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (h5copygentest ${HDF5_TOOLS_H5COPY_SOURCE_DIR}/h5copygentest.c)
+     TARGET_NAMING (h5copygentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5copygentest " " " ")
+     TARGET_LINK_LIBRARIES (h5copygentest ${HDF5_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5copygentest PROPERTIES FOLDER generator/tools)
+ 
+Index: tools/h5import/CMakeLists.txt
+===================================================================
+--- tools/h5import/CMakeLists.txt	(revision 23770)
++++ tools/h5import/CMakeLists.txt	(revision 23771)
+@@ -11,6 +11,7 @@
+ # --------------------------------------------------------------------
+ ADD_EXECUTABLE (h5import ${HDF5_TOOLS_H5IMPORT_SOURCE_DIR}/h5import.c)
+ TARGET_NAMING (h5import ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5import " " " ")
+ TARGET_LINK_LIBRARIES (h5import  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ #SET_TARGET_PROPERTIES (h5import PROPERTIES COMPILE_DEFINITIONS H5DEBUGIMPORT)
+ SET_TARGET_PROPERTIES (h5import PROPERTIES FOLDER tools)
+@@ -29,6 +30,7 @@
+   # --------------------------------------------------------------------
+   ADD_EXECUTABLE (h5importtest ${HDF5_TOOLS_H5IMPORT_SOURCE_DIR}/h5importtest.c)
+   TARGET_NAMING (h5importtest ${LIB_TYPE})
++  TARGET_C_PROPERTIES (h5importtest " " " ")
+   TARGET_LINK_LIBRARIES (h5importtest ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+   SET_TARGET_PROPERTIES (h5importtest PROPERTIES FOLDER tools)
+ 
+Index: tools/h5stat/CMakeLists.txt
+===================================================================
+--- tools/h5stat/CMakeLists.txt	(revision 23770)
++++ tools/h5stat/CMakeLists.txt	(revision 23771)
+@@ -11,6 +11,7 @@
+ # --------------------------------------------------------------------
+ ADD_EXECUTABLE (h5stat ${HDF5_TOOLS_H5STAT_SOURCE_DIR}/h5stat.c)
+ TARGET_NAMING (h5stat ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5stat " " " ")
+ TARGET_LINK_LIBRARIES (h5stat  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5stat PROPERTIES FOLDER tools)
+ 
+@@ -29,6 +30,7 @@
+   IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (h5stat_gentest ${HDF5_TOOLS_H5STAT_SOURCE_DIR}/h5stat_gentest.c)
+     TARGET_NAMING (h5stat_gentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5stat_gentest " " " ")
+     TARGET_LINK_LIBRARIES (h5stat_gentest ${HDF5_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5stat_gentest PROPERTIES FOLDER generator/tools)
+     
+Index: tools/h5ls/CMakeLists.txt
+===================================================================
+--- tools/h5ls/CMakeLists.txt	(revision 23770)
++++ tools/h5ls/CMakeLists.txt	(revision 23771)
+@@ -11,6 +11,7 @@
+ #-----------------------------------------------------------------------------
+ ADD_EXECUTABLE (h5ls ${HDF5_TOOLS_H5LS_SOURCE_DIR}/h5ls.c)
+ TARGET_NAMING (h5ls ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5ls " " " ")
+ TARGET_LINK_LIBRARIES (h5ls  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5ls PROPERTIES FOLDER tools)
+ 
+Index: tools/misc/CMakeLists.txt
+===================================================================
+--- tools/misc/CMakeLists.txt	(revision 23770)
++++ tools/misc/CMakeLists.txt	(revision 23771)
+@@ -12,16 +12,19 @@
+ #-- Misc Executables
+ ADD_EXECUTABLE (h5debug ${HDF5_TOOLS_MISC_SOURCE_DIR}/h5debug.c)
+ TARGET_NAMING (h5debug ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5debug " " " ")
+ TARGET_LINK_LIBRARIES (h5debug ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5debug PROPERTIES FOLDER tools)
+ 
+ ADD_EXECUTABLE (h5repart ${HDF5_TOOLS_MISC_SOURCE_DIR}/h5repart.c)
+ TARGET_NAMING (h5repart ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5repart " " " ")
+ TARGET_LINK_LIBRARIES (h5repart ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5repart PROPERTIES FOLDER tools)
+ 
+ ADD_EXECUTABLE (h5mkgrp ${HDF5_TOOLS_MISC_SOURCE_DIR}/h5mkgrp.c)
+ TARGET_NAMING (h5mkgrp ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5mkgrp " " " ")
+ TARGET_LINK_LIBRARIES (h5mkgrp  ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5mkgrp PROPERTIES FOLDER tools)
+ 
+@@ -44,6 +47,7 @@
+   IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (h5repart_gentest ${HDF5_TOOLS_MISC_SOURCE_DIR}/h5repart_gentest.c)
+     TARGET_NAMING (h5repart_gentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5repart_gentest " " " ")
+     TARGET_LINK_LIBRARIES (h5repart_gentest ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5repart_gentest PROPERTIES FOLDER generator/tools)
+     #ADD_TEST (NAME h5repart_gentest COMMAND $<TARGET_FILE:h5repart_gentest>)
+@@ -51,6 +55,7 @@
+ 
+   ADD_EXECUTABLE (h5repart_test ${HDF5_TOOLS_MISC_SOURCE_DIR}/repart_test.c)
+   TARGET_NAMING (h5repart_test ${LIB_TYPE})
++  TARGET_C_PROPERTIES (h5repart_test " " " ")
+   TARGET_LINK_LIBRARIES (h5repart_test ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+   SET_TARGET_PROPERTIES (h5repart_test PROPERTIES FOLDER tools)
+   
+Index: UserMacros.cmake
+===================================================================
+--- UserMacros.cmake	(revision 0)
++++ UserMacros.cmake	(revision 23771)
+@@ -0,0 +1,17 @@
++########################################################
++#  Include file for user options
++########################################################
++
++#-----------------------------------------------------------------------------
++# Option to Build with User Defined Values
++#-----------------------------------------------------------------------------
++MACRO (MACRO_USER_DEFINED_LIBS)
++  SET (USER_DEFINED_VALUE "FALSE")
++ENDMACRO (MACRO_USER_DEFINED_LIBS)
++
++#-------------------------------------------------------------------------------
++OPTION (BUILD_USER_DEFINED_LIBS "Build With User Defined Values" OFF)
++IF (BUILD_USER_DEFINED_LIBS)
++  MACRO_USER_DEFINED_LIBS ()
++ENDIF (BUILD_USER_DEFINED_LIBS)
++ 
+\ No newline at end of file
+Index: hl/test/CMakeLists.txt
+===================================================================
+--- hl/test/CMakeLists.txt	(revision 23770)
++++ hl/test/CMakeLists.txt	(revision 23771)
+@@ -23,6 +23,7 @@
+ MACRO (HL_ADD_TEST hl_name files)
+   ADD_EXECUTABLE (hl_${hl_name} ${hl_name}.c)
+   TARGET_NAMING (hl_${hl_name} ${LIB_TYPE})
++  TARGET_C_PROPERTIES (hl_${hl_name} " " " ")
+   TARGET_LINK_LIBRARIES (hl_${hl_name}
+       ${HDF5_HL_LIB_TARGET}
+       ${HDF5_LIB_TARGET}
+@@ -97,6 +98,7 @@
+ IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+   ADD_EXECUTABLE (hl_gen_test_ds gen_test_ds.c)
+   TARGET_NAMING (hl_gen_test_ds ${LIB_TYPE})
++  TARGET_C_PROPERTIES (hl_gen_test_ds " " " ")
+   TARGET_LINK_LIBRARIES (hl_gen_test_ds
+       ${HDF5_HL_LIB_TARGET}
+       ${HDF5_LIB_TARGET}
+Index: hl/tools/CMakeLists.txt
+===================================================================
+--- hl/tools/CMakeLists.txt	(revision 23770)
++++ hl/tools/CMakeLists.txt	(revision 23771)
+@@ -19,6 +19,7 @@
+ 
+ ADD_EXECUTABLE (gif2h5 ${GIF2H5_SRCS})
+ TARGET_NAMING (gif2h5 ${LIB_TYPE})
++TARGET_C_PROPERTIES (gif2h5 " " " ")
+ TARGET_LINK_LIBRARIES (gif2h5 ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (gif2h5 PROPERTIES FOLDER tools/hl)
+ 
+@@ -29,6 +30,7 @@
+ )
+ ADD_EXECUTABLE (h52gif ${hdf2gif_SRCS})
+ TARGET_NAMING (h52gif ${LIB_TYPE})
++TARGET_C_PROPERTIES (h52gif " " " ")
+ TARGET_LINK_LIBRARIES (h52gif ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h52gif PROPERTIES FOLDER tools/hl)
+ 
+@@ -40,6 +42,7 @@
+ IF (HDF5_BUILD_GENERATORS AND NOT BUILD_SHARED_LIBS)
+     ADD_EXECUTABLE (hl_h52gifgentest ${HDF5_HL_TOOLS_SOURCE_DIR}/gif2h5/h52gifgentst.c)
+     TARGET_NAMING (hl_h52gifgentest ${LIB_TYPE})
++    TARGET_C_PROPERTIES (hl_h52gifgentest " " " ")
+     TARGET_LINK_LIBRARIES (hl_h52gifgentest ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET})
+     SET_TARGET_PROPERTIES (hl_h52gifgentest PROPERTIES FOLDER generator/tools/hl)
+     
+Index: hl/src/CMakeLists.txt
+===================================================================
+--- hl/src/CMakeLists.txt	(revision 23770)
++++ hl/src/CMakeLists.txt	(revision 23771)
+@@ -36,6 +36,7 @@
+ )
+ 
+ ADD_LIBRARY (${HDF5_HL_LIB_TARGET} ${LIB_TYPE} ${HL_SRCS} ${HL_HEADERS})
++TARGET_C_PROPERTIES (${HDF5_HL_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_HL_LIB_TARGET}")
+ H5_SET_LIB_OPTIONS (${HDF5_HL_LIB_TARGET} ${HDF5_HL_LIB_NAME} ${LIB_TYPE})
+Index: hl/c++/test/CMakeLists.txt
+===================================================================
+--- hl/c++/test/CMakeLists.txt	(revision 23770)
++++ hl/c++/test/CMakeLists.txt	(revision 23771)
+@@ -17,6 +17,7 @@
+   INCLUDE_DIRECTORIES (${HDF5_CPP_SRC_DIR}/src)
+ 
+   ADD_EXECUTABLE (hl_ptableTest ${HDF5_HL_CPP_TEST_SOURCE_DIR}/ptableTest.cpp)
++  TARGET_C_PROPERTIES (hl_ptableTest " " " ")
+   TARGET_NAMING (hl_ptableTest ${LIB_TYPE})
+   TARGET_LINK_LIBRARIES (
+       hl_ptableTest
+Index: hl/c++/src/CMakeLists.txt
+===================================================================
+--- hl/c++/src/CMakeLists.txt	(revision 23770)
++++ hl/c++/src/CMakeLists.txt	(revision 23771)
+@@ -10,7 +10,8 @@
+ SET (HDF5_HL_CPP_SRCS ${HDF5_HL_CPP_SRC_SOURCE_DIR}/H5PacketTable.cpp)
+ SET (HDF5_HL_CPP_HDRS ${HDF5_HL_CPP_SRC_SOURCE_DIR}/H5PacketTable.h)
+ 
+-ADD_LIBRARY ( ${HDF5_HL_CPP_LIB_TARGET} ${LIB_TYPE} ${HDF5_HL_CPP_SRCS})
++ADD_LIBRARY (${HDF5_HL_CPP_LIB_TARGET} ${LIB_TYPE} ${HDF5_HL_CPP_SRCS})
++TARGET_C_PROPERTIES (${HDF5_HL_CPP_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (
+     ${HDF5_HL_CPP_LIB_TARGET}
+     ${HDF5_HL_LIB_TARGET}
+Index: hl/c++/examples/CMakeLists.txt
+===================================================================
+--- hl/c++/examples/CMakeLists.txt	(revision 23770)
++++ hl/c++/examples/CMakeLists.txt	(revision 23771)
+@@ -12,6 +12,7 @@
+ # --------------------------------------------------------------------
+ ADD_EXECUTABLE (ptExampleFL ${HDF5_HL_CPP_EXAMPLES_SOURCE_DIR}/ptExampleFL.cpp)
+ TARGET_NAMING (ptExampleFL ${LIB_TYPE})
++TARGET_C_PROPERTIES (ptExampleFL " " " ")
+ TARGET_LINK_LIBRARIES (
+     ptExampleFL
+     ${HDF5_HL_CPP_LIB_TARGET}
+Index: hl/fortran/test/CMakeLists.txt
+===================================================================
+--- hl/fortran/test/CMakeLists.txt	(revision 23770)
++++ hl/fortran/test/CMakeLists.txt	(revision 23771)
+@@ -23,7 +23,7 @@
+ #-- Adding test for hl_f90_tstds
+ ADD_EXECUTABLE (hl_f90_tstds tstds.f90)
+ TARGET_NAMING (hl_f90_tstds ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (hl_f90_tstds "")
++TARGET_FORTRAN_PROPERTIES (hl_f90_tstds " " " ")
+ TARGET_LINK_LIBRARIES (hl_f90_tstds ${HDF5_HL_F90_LIB_TARGET} ${HDF5_F90_LIB_TARGET})
+ SET_TARGET_PROPERTIES (hl_f90_tstds PROPERTIES LINKER_LANGUAGE Fortran)
+ SET_TARGET_PROPERTIES (hl_f90_tstds PROPERTIES FOLDER test/hl/fortran)
+@@ -33,7 +33,7 @@
+ #-- Adding test for hl_f90_tstlite
+ ADD_EXECUTABLE (hl_f90_tstlite tstlite.f90)
+ TARGET_NAMING (hl_f90_tstlite ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (hl_f90_tstlite "")
++TARGET_FORTRAN_PROPERTIES (hl_f90_tstlite " " " ")
+ TARGET_LINK_LIBRARIES (hl_f90_tstlite ${HDF5_HL_F90_LIB_TARGET} ${HDF5_F90_LIB_TARGET})
+ SET_TARGET_PROPERTIES (hl_f90_tstlite PROPERTIES LINKER_LANGUAGE Fortran)
+ SET_TARGET_PROPERTIES (hl_f90_tstlite PROPERTIES FOLDER test/hl/fortran)
+@@ -43,7 +43,7 @@
+ #-- Adding test for hl_f90_tstimage
+ ADD_EXECUTABLE (hl_f90_tstimage tstimage.f90)
+ TARGET_NAMING (hl_f90_tstimage ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (hl_f90_tstimage "")
++TARGET_FORTRAN_PROPERTIES (hl_f90_tstimage " " " ")
+ TARGET_LINK_LIBRARIES (hl_f90_tstimage  ${HDF5_HL_F90_LIB_TARGET} ${HDF5_F90_LIB_TARGET})
+ SET_TARGET_PROPERTIES (hl_f90_tstimage PROPERTIES LINKER_LANGUAGE Fortran)
+ SET_TARGET_PROPERTIES (hl_f90_tstimage PROPERTIES FOLDER test/hl/fortran)
+@@ -53,7 +53,7 @@
+ #-- Adding test for hl_f90_tsttable
+ ADD_EXECUTABLE (hl_f90_tsttable tsttable.f90)
+ TARGET_NAMING (hl_f90_tsttable ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (hl_f90_tsttable "")
++TARGET_FORTRAN_PROPERTIES (hl_f90_tsttable " " " ")
+ TARGET_LINK_LIBRARIES (hl_f90_tsttable ${HDF5_HL_F90_LIB_TARGET} ${HDF5_F90_LIB_TARGET})
+ SET_TARGET_PROPERTIES (hl_f90_tsttable PROPERTIES LINKER_LANGUAGE Fortran)
+ SET_TARGET_PROPERTIES (hl_f90_tsttable PROPERTIES FOLDER test/hl/fortran)
+Index: hl/fortran/src/CMakeLists.txt
+===================================================================
+--- hl/fortran/src/CMakeLists.txt	(revision 23770)
++++ hl/fortran/src/CMakeLists.txt	(revision 23771)
+@@ -26,6 +26,7 @@
+ SET (HDF5_HL_F90_HEADERS ${HDF5_HL_F90_SRC_SOURCE_DIR}/H5LTf90proto.h)
+ 
+ ADD_LIBRARY (${HDF5_HL_F90_C_LIB_TARGET} ${LIB_TYPE} ${HDF5_HL_F90_C_SRCS} ${HDF5_HL_F90_HEADERS})
++TARGET_C_PROPERTIES (${HDF5_HL_F90_C_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_HL_F90_C_LIB_TARGET} ${HDF5_F90_C_LIB_TARGET} ${HDF5_HL_LIB_TARGET})
+ SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_HL_F90_C_LIB_TARGET}")
+ H5_SET_LIB_OPTIONS (${HDF5_HL_F90_C_LIB_TARGET} ${HDF5_HL_F90_C_LIB_NAME} ${LIB_TYPE})
+@@ -61,7 +62,7 @@
+         HDF5F90_WINDOWS
+   )
+ ENDIF (WIN32 AND NOT CYGWIN)
+-TARGET_FORTRAN_WIN_PROPERTIES (${HDF5_HL_F90_LIB_TARGET} ${SHARED_LINK_FLAGS})
++TARGET_FORTRAN_PROPERTIES (${HDF5_HL_F90_LIB_TARGET} " " ${SHARED_LINK_FLAGS})
+ SET_TARGET_PROPERTIES (${HDF5_HL_F90_LIB_TARGET} PROPERTIES LINKER_LANGUAGE Fortran)
+ TARGET_LINK_LIBRARIES (${HDF5_HL_F90_LIB_TARGET} ${HDF5_HL_F90_C_LIB_TARGET} ${HDF5_F90_LIB_TARGET})
+ SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_HL_F90_LIB_TARGET}")
+Index: hl/fortran/examples/CMakeLists.txt
+===================================================================
+--- hl/fortran/examples/CMakeLists.txt	(revision 23770)
++++ hl/fortran/examples/CMakeLists.txt	(revision 23771)
+@@ -18,7 +18,7 @@
+ FOREACH (example ${examples})
+   ADD_EXECUTABLE (hl_f90_ex_${example} ${HDF5_HL_F90_EXAMPLES_SOURCE_DIR}/${example}.f90)
+   TARGET_NAMING (hl_f90_ex_${example} ${LIB_TYPE})
+-  TARGET_FORTRAN_WIN_PROPERTIES (hl_f90_ex_${example} "")
++  TARGET_FORTRAN_PROPERTIES (hl_f90_ex_${example} " " " ")
+   TARGET_LINK_LIBRARIES (hl_f90_ex_${example} 
+       ${HDF5_HL_F90_LIB_TARGET}
+       ${HDF5_F90_LIB_TARGET}
+Index: hl/examples/CMakeLists.txt
+===================================================================
+--- hl/examples/CMakeLists.txt	(revision 23770)
++++ hl/examples/CMakeLists.txt	(revision 23771)
+@@ -43,6 +43,7 @@
+ FOREACH (example ${examples})
+   ADD_EXECUTABLE (hl_ex_${example} ${HDF5_HL_EXAMPLES_SOURCE_DIR}/${example}.c)
+   TARGET_NAMING (hl_ex_${example} ${LIB_TYPE})
++  TARGET_C_PROPERTIES (hl_ex_${example} " " " ")
+   TARGET_LINK_LIBRARIES (hl_ex_${example} ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET})
+   SET_TARGET_PROPERTIES (hl_ex_${example} PROPERTIES FOLDER examples/hl)
+ 
+Index: configure.ac
+===================================================================
+--- configure.ac	(revision 23770)
++++ configure.ac	(revision 23771)
+@@ -4305,7 +4305,7 @@
+ ## Enable strict file format checks
+ ##
+ AC_SUBST([STRICT_FORMAT_CHECKS])
+-AC_MSG_CHECKING([Whether to perform strict file format checks]);
++AC_MSG_CHECKING([whether to perform strict file format checks]);
+ AC_ARG_ENABLE([strict-format-checks],
+               [AS_HELP_STRING([--enable-strict-format-checks],
+                      [Enable strict file format checks, default=yes if
+@@ -4338,7 +4338,7 @@
+ ## ----------------------------------------------------------------------
+ ## Enable embedded library information
+ ##
+-AC_MSG_CHECKING([Whether to have library information embedded in the executables])
++AC_MSG_CHECKING([whether to have library information embedded in the executables])
+ AC_ARG_ENABLE([embedded-libinfo],
+     [AS_HELP_STRING([--enable-embedded-libinfo],
+ 	[Enable embedded library information [default=yes]])],
+Index: config/cmake/ConfigureChecks.cmake
+===================================================================
+--- config/cmake/ConfigureChecks.cmake	(revision 23770)
++++ config/cmake/ConfigureChecks.cmake	(revision 23771)
+@@ -183,15 +183,15 @@
+   ENDIF (MINGW)
+   SET (H5_HAVE_LIBWS2_32 1)
+   SET (H5_HAVE_LIBWSOCK32 1)
++
++  #-----------------------------------------------------------------------------
++  # These tests need to be manually SET for windows since there is currently
++  # something not quite correct with the actual test implementation. This affects
++  # the 'dt_arith' test and most likely lots of other code
++  # ----------------------------------------------------------------------------
++  SET (H5_FP_TO_ULLONG_RIGHT_MAXIMUM "" CACHE INTERNAL "")
+ ENDIF (WINDOWS)
+ 
+-#-----------------------------------------------------------------------------
+-# These tests need to be manually SET for windows since there is currently
+-# something not quite correct with the actual test implementation. This affects
+-# the 'dt_arith' test and most likely lots of other code
+-# ----------------------------------------------------------------------------
+-SET (H5_FP_TO_ULLONG_RIGHT_MAXIMUM "" CACHE INTERNAL "")
+-
+ # ----------------------------------------------------------------------
+ # END of WINDOWS Hard code Values
+ # ----------------------------------------------------------------------
+@@ -1029,7 +1029,9 @@
+ # integers except 'unsigned long long'.  Other HP-UX systems are unknown
+ # yet. (1/8/05 - SLU)
+ #
+-H5ConversionTests (H5_LDOUBLE_TO_INTEGER_WORKS "Checking IF converting from long double to integers works")
++IF (NOT MSVC)
++  H5ConversionTests (H5_LDOUBLE_TO_INTEGER_WORKS "Checking IF converting from long double to integers works")
++ENDIF (NOT MSVC)
+ # -----------------------------------------------------------------------
+ # Set flag to indicate that the machine can handle conversion from
+ # integers to long double.  (This flag should be set "yes" for all
+@@ -1103,7 +1105,9 @@
+ # where the last 2 bytes of mantissa are lost when compiler tries to do
+ # the conversion, and Cygwin where compiler doesn't do rounding correctly.)
+ #
+-H5ConversionTests (H5_ULLONG_TO_LDOUBLE_PRECISION "Checking IF converting unsigned long long to long double with precision")
++IF (NOT MSVC)
++  H5ConversionTests (H5_ULLONG_TO_LDOUBLE_PRECISION "Checking IF converting unsigned long long to long double with precision")
++ENDIF (NOT MSVC)
+ # ----------------------------------------------------------------------
+ # Set the flag to indicate that the machine can handle overflow converting
+ # all floating-point to all integer types.
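
The two H5ConversionTests probes guarded here compile and run small programs
to check whether the host converts correctly between long double and the
integer types; the patch skips them under MSVC and relies on the hard-coded
cache values set earlier. A rough C sketch of the shape such a probe takes,
illustrative only and not the actual HDF5 test code:

    #include <stdio.h>

    /* Probe-style check: does a long double just above INT_MAX survive
     * a round trip to long long?  Real HDF5 probes are far more
     * thorough; this only shows the compiled-and-run test pattern. */
    int main(void)
    {
        long double ld = 2147483648.0L;  /* 2^31 */
        long long   ll = (long long)ld;

        if (ll == 2147483648LL) {
            printf("conversion ok\n");
            return 0;                    /* probe passes */
        }
        printf("conversion lost precision\n");
        return 1;                        /* probe fails */
    }
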
+Index: config/cmake/HDFMacros.cmake
+===================================================================
+--- config/cmake/HDFMacros.cmake	(revision 23770)
++++ config/cmake/HDFMacros.cmake	(revision 23771)
+@@ -121,20 +121,67 @@
+ ENDMACRO (HDF_SET_LIB_OPTIONS)
+ 
+ #-------------------------------------------------------------------------------
+-MACRO (TARGET_FORTRAN_WIN_PROPERTIES forttarget addlinkflags)
+-  IF (WIN32 AND MSVC)
++MACRO (TARGET_C_PROPERTIES wintarget addcompileflags addlinkflags)
++  IF (MSVC)
++    TARGET_MSVC_PROPERTIES (${wintarget} "${addcompileflags} ${WIN_COMPILE_FLAGS}" "${addlinkflags} ${WIN_LINK_FLAGS}")
++  ELSE (MSVC)
+     IF (BUILD_SHARED_LIBS)
++      SET_TARGET_PROPERTIES (${wintarget}
++          PROPERTIES
++              COMPILE_FLAGS "${addcompileflags}"
++              LINK_FLAGS "${addlinkflags}"
++      ) 
++    ELSE (BUILD_SHARED_LIBS)
++      SET_TARGET_PROPERTIES (${wintarget}
++          PROPERTIES
++              COMPILE_FLAGS "${addcompileflags}"
++              LINK_FLAGS "${addlinkflags}"
++      ) 
++    ENDIF (BUILD_SHARED_LIBS)
++  ENDIF (MSVC)
++ENDMACRO (TARGET_C_PROPERTIES)
++
++#-------------------------------------------------------------------------------
++MACRO (TARGET_MSVC_PROPERTIES wintarget addcompileflags addlinkflags)
++  IF (MSVC)
++    IF (BUILD_SHARED_LIBS)
++      SET_TARGET_PROPERTIES (${wintarget}
++          PROPERTIES
++              COMPILE_FLAGS "/dll ${addcompileflags}"
++              LINK_FLAGS "/SUBSYSTEM:CONSOLE ${addlinkflags}"
++      ) 
++    ELSE (BUILD_SHARED_LIBS)
++      SET_TARGET_PROPERTIES (${wintarget}
++          PROPERTIES
++              COMPILE_FLAGS "${addcompileflags}"
++              LINK_FLAGS "/SUBSYSTEM:CONSOLE ${addlinkflags}"
++      ) 
++    ENDIF (BUILD_SHARED_LIBS)
++  ENDIF (MSVC)
++ENDMACRO (TARGET_MSVC_PROPERTIES)
++
++#-------------------------------------------------------------------------------
++MACRO (TARGET_FORTRAN_PROPERTIES forttarget addcompileflags addlinkflags)
++  IF (WIN32)
++    TARGET_FORTRAN_WIN_PROPERTIES (${forttarget} "${addcompileflags} ${WIN_COMPILE_FLAGS}" "${addlinkflags} ${WIN_LINK_FLAGS}")
++  ENDIF (WIN32)
++ENDMACRO (TARGET_FORTRAN_PROPERTIES)
++
++#-------------------------------------------------------------------------------
++MACRO (TARGET_FORTRAN_WIN_PROPERTIES forttarget addcompileflags addlinkflags)
++  IF (MSVC)
++    IF (BUILD_SHARED_LIBS)
+       SET_TARGET_PROPERTIES (${forttarget}
+           PROPERTIES
+-              COMPILE_FLAGS "/dll"
++              COMPILE_FLAGS "/dll ${addcompileflags}"
+               LINK_FLAGS "/SUBSYSTEM:CONSOLE ${addlinkflags}"
+       ) 
+     ELSE (BUILD_SHARED_LIBS)
+       SET_TARGET_PROPERTIES (${forttarget}
+           PROPERTIES
+-              COMPILE_FLAGS "/MD"
++              COMPILE_FLAGS "${addcompileflags}"
+               LINK_FLAGS "/SUBSYSTEM:CONSOLE ${addlinkflags}"
+       ) 
+     ENDIF (BUILD_SHARED_LIBS)
+-  ENDIF (WIN32 AND MSVC)
++  ENDIF (MSVC)
+ ENDMACRO (TARGET_FORTRAN_WIN_PROPERTIES)
+Index: config/cmake/UserMacros/Windows_MT.cmake
+===================================================================
+--- config/cmake/UserMacros/Windows_MT.cmake	(revision 0)
++++ config/cmake/UserMacros/Windows_MT.cmake	(revision 23771)
+@@ -0,0 +1,39 @@
++########################################################
++#  Include file for user options
++########################################################
++
++# To use this option, copy both the macro and option code
++# into the root UserMacros.cmake file. 
++
++#-----------------------------------------------------------------------------
++# Option to Build with Static CRT libraries on Windows
++#-------------------------------------------------------------------------------
++MACRO (TARGET_STATIC_CRT_FLAGS)
++  IF (MSVC AND NOT BUILD_SHARED_LIBS)
++    FOREACH (flag_var
++        CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
++        CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO
++        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
++        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
++      IF (${flag_var} MATCHES "/MD")
++        STRING (REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
++      ENDIF (${flag_var} MATCHES "/MD")
++    ENDFOREACH (flag_var)
++    FOREACH (flag_var
++        CMAKE_Fortran_FLAGS CMAKE_Fortran_FLAGS_DEBUG CMAKE_Fortran_FLAGS_RELEASE
++        CMAKE_Fortran_FLAGS_MINSIZEREL CMAKE_Fortran_FLAGS_RELWITHDEBINFO)
++      IF (${flag_var} MATCHES "/libs:dll")
++        STRING (REGEX REPLACE "/libs:dll" "/libs:static" ${flag_var} "${${flag_var}}")
++      ENDIF (${flag_var} MATCHES "/libs:dll")
++    ENDFOREACH (flag_var)
++    SET (WIN_COMPILE_FLAGS "/MT")
++    SET (WIN_LINK_FLAGS "/NODEFAULTLIB:MSVCRT")
++  ENDIF (MSVC AND NOT BUILD_SHARED_LIBS)
++ENDMACRO (TARGET_STATIC_CRT_FLAGS)
++
++#-----------------------------------------------------------------------------
++OPTION (BUILD_STATIC_CRT_LIBS "Build With Static CRT Libraries" OFF)
++IF (BUILD_STATIC_CRT_LIBS)
++  TARGET_STATIC_CRT_FLAGS ()
++ENDIF (BUILD_STATIC_CRT_LIBS)
++ 
+\ No newline at end of file
+Index: MANIFEST
+===================================================================
+--- MANIFEST	(revision 23770)
++++ MANIFEST	(revision 23771)
+@@ -1361,6 +1361,8 @@
+ ./tools/testfiles/topaque.h5
+ ./tools/testfiles/trawdatafile.ddl
+ ./tools/testfiles/trawdatafile.exp
++./tools/testfiles/trawssetfile.ddl
++./tools/testfiles/trawssetfile.exp
+ ./tools/testfiles/tsaf.ddl
+ ./tools/testfiles/tsaf.h5
+ ./tools/testfiles/tscalarattrintsize.ddl
+@@ -2243,8 +2245,12 @@
+ ./config/cmake/NSIS.template.in
+ ./config/cmake/NSIS.InstallOptions.ini.in
+ 
++# CMake-specific User Files
++./config/cmake/UserMacros/Windows_MT.cmake
++
+ ./CMakeLists.txt
+ ./CTestConfig.cmake
++./UserMacros.cmake
+ ./c++/CMakeLists.txt
+ ./c++/examples/CMakeLists.txt
+ ./c++/src/CMakeLists.txt
+Index: c++/test/CMakeLists.txt
+===================================================================
+--- c++/test/CMakeLists.txt	(revision 23770)
++++ c++/test/CMakeLists.txt	(revision 23771)
+@@ -37,6 +37,7 @@
+ 
+ ADD_EXECUTABLE (cpp_testhdf5 ${CPP_TEST_SRCS} )
+ TARGET_NAMING (cpp_testhdf5 ${LIB_TYPE})
++TARGET_C_PROPERTIES (cpp_testhdf5 " " " ")
+ TARGET_LINK_LIBRARIES (cpp_testhdf5
+     ${HDF5_CPP_LIB_TARGET}
+     ${HDF5_LIB_TARGET}
+Index: c++/src/CMakeLists.txt
+===================================================================
+--- c++/src/CMakeLists.txt	(revision 23770)
++++ c++/src/CMakeLists.txt	(revision 23771)
+@@ -82,6 +82,7 @@
+ )
+ 
+ ADD_LIBRARY (${HDF5_CPP_LIB_TARGET} ${LIB_TYPE} ${CPP_SRCS} ${CPP_HDRS})
++TARGET_C_PROPERTIES (${HDF5_CPP_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_CPP_LIB_TARGET}")
+ H5_SET_LIB_OPTIONS (${HDF5_CPP_LIB_TARGET} ${HDF5_CPP_LIB_NAME} ${LIB_TYPE})
+Index: c++/examples/CMakeLists.txt
+===================================================================
+--- c++/examples/CMakeLists.txt	(revision 23770)
++++ c++/examples/CMakeLists.txt	(revision 23771)
+@@ -23,6 +23,7 @@
+ FOREACH (example ${examples})
+   ADD_EXECUTABLE (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp)
+   TARGET_NAMING (cpp_ex_${example} ${LIB_TYPE})
++  TARGET_C_PROPERTIES (cpp_ex_${example} " " " ")
+   TARGET_LINK_LIBRARIES (cpp_ex_${example} ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
+   SET_TARGET_PROPERTIES (cpp_ex_${example} PROPERTIES FOLDER examples/cpp)
+ ENDFOREACH (example ${examples})
+Index: bin/reconfigure.system
+===================================================================
+--- bin/reconfigure.system	(revision 0)
++++ bin/reconfigure.system	(revision 23771)
+@@ -0,0 +1,143 @@
++#! /bin/sh
++#
++# Copyright by the Board of Trustees of the University of Illinois.
++# All rights reserved.
++#
++# This file is part of HDF5.  The full HDF5 copyright notice, including
++# terms governing use, modification, and redistribution, is contained in
++# the files COPYING and Copyright.html.  COPYING can be found at the root
++# of the source code distribution tree; Copyright.html can be found at the
++# root level of an installed copy of the electronic HDF5 document set and
++# is linked from the top-level documents page.  It can also be found at
++# http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have
++# access to either file, you may request a copy from help@hdfgroup.org.
++#
++
++# A script to reconfigure autotools for HDF5, and to recreate other
++# generated files specific to HDF5.
++# If the paths of the autotools are not specified by the user, they
++# are hardcoded to point to their locations on HDF5 Linux machines.
++# Users can specify the locations of the autotools with the following
++# variables:
++# AUTOCONF, AUTOMAKE, ACLOCAL, AUTOHEADER should be the path to the
++# corresponding tools.
++# LIBTOOL_DIR should be the path to the base libtool directory;
++# $LIBTOOL_DIR/bin/libtool should invoke libtool, while
++# $LIBTOOL_DIR/share/aclocal needs to be included by aclocal.
++# Be very careful when specifying these tools manually!  There are a lot
++# of versions that can get confused (not even counting the m4 utility)!
++
++# HDF5 currently uses the following versions of the autotools:
++AUTOCONF_VERSION="autoconf (GNU Autoconf) 2.69"
++AUTOMAKE_VERSION="automake (GNU automake) 1.12.3"
++AUTOHEADER_VERSION="autoheader (GNU Autoconf) 2.69"
++ACLOCAL_VERSION="aclocal (GNU automake) 1.12.3"
++LIBTOOL_VERSION="(GNU libtool) 2.4.2"
++M4_VERSION="m4 (GNU M4) 1.4.16"
++
++#
++# When upgrading automake's version, don't forget to also update its
++# helper utilities, especially depcomp.
++
++
++# If paths to autotools are not specified by the user, assume tools are
++# running on jam in /mnt/hdf/packages and set paths accordingly.
++if test -z ${AUTOCONF}; then
++  AUTOCONF=/mnt/hdf/packages/autoconf/autoconf-2.69/bin/autoconf
++fi
++if test -z ${AUTOMAKE}; then
++  AUTOMAKE=/mnt/hdf/packages/automake/automake-1.12.3/bin/automake-1.12
++fi
++if test -z ${AUTOHEADER}; then
++  AUTOHEADER=/mnt/hdf/packages/autoconf/autoconf-2.69/bin/autoheader
++fi
++if test -z ${ACLOCAL}; then
++  ACLOCAL=/mnt/hdf/packages/automake/automake-1.12.3/bin/aclocal-1.12
++fi
++if test -z ${LIBTOOL}; then
++  LIBTOOL=/mnt/hdf/packages/libtool/libtool-2.4.2/bin/libtool
++fi
++if test -z ${M4}; then
++  M4=/mnt/hdf/packages/m4/m4-1.4.16/bin/m4
++fi
++
++# Check version numbers of all autotools against the "correct" versions
++AC_VERS=`${AUTOCONF} --version 2>&1 | grep "^${AUTOCONF_VERSION}"`
++if test -z "${AC_VERS}"; then
++   echo "${AUTOCONF} version is not ${AUTOCONF_VERSION}"
++   exit 1
++fi
++AM_VERS=`${AUTOMAKE} --version 2>&1 | grep "^${AUTOMAKE_VERSION}"`
++if test -z "${AM_VERS}"; then
++   echo "${AUTOMAKE} version is not ${AUTOMAKE_VERSION}"
++   exit 1
++fi
++AH_VERS=`${AUTOHEADER} --version 2>&1 | grep "^${AUTOHEADER_VERSION}"`
++if test -z "${AH_VERS}"; then
++   echo "${AUTOHEADER} version is not ${AUTOHEADER_VERSION}"
++   exit 1
++fi
++AL_VERS=`${ACLOCAL} --version 2>&1 | grep "^${ACLOCAL_VERSION}"`
++if test -z "${AL_VERS}"; then
++   echo "${ACLOCAL} version is not ${ACLOCAL_VERSION}"
++   exit 1
++fi
++LT_VERS=`${LIBTOOL} --version 2>&1 | grep "${LIBTOOL_VERSION}"`
++if test -z "${LT_VERS}"; then
++   echo "${LIBTOOL} version is not ${LIBTOOL_VERSION}"
++   exit 1
++fi
++M4_VERS=`${M4} --version 2>&1 | grep "${M4_VERSION}"`
++if test -z "${M4_VERS}"; then
++   echo "${M4} version is not ${M4_VERSION}"
++   exit 1
++fi
++
++# Make sure that the tools are in the path.
++AUTOCONF_DIR=`dirname ${AUTOCONF}`
++LIBTOOL_DIR=`dirname ${LIBTOOL}`
++M4_DIR=`dirname ${M4}`
++PATH=${AUTOCONF_DIR}:${M4_DIR}:$PATH
++
++# Run autoconf/automake commands in order
++  echo ${ACLOCAL} -I ${LIBTOOL_DIR}/../share/aclocal
++  ${ACLOCAL} -I ${LIBTOOL_DIR}/../share/aclocal || exit 1
++
++  echo ${AUTOHEADER}
++  ${AUTOHEADER} || exit 1
++
++  echo ${AUTOMAKE} --add-missing
++  ${AUTOMAKE} --add-missing || exit 1
++
++  echo ${AUTOCONF}
++  ${AUTOCONF} || exit 1
++
++# Clean up top-level Makefile.in
++# pmake wants an argument to be the first non-comment line it encounters
++# in the Makefile.  Automake wants to reorganize the Makefile.
++# To work around this, we post-process the top-level Makefile.in.
++  sed "s/^#xxx//" Makefile.in > Makefile.in.new
++  mv Makefile.in.new Makefile.in
++
++# Run trace script
++# The trace script adds H5TRACE macros to library source files.  It should
++# have no effect on files that don't have HDF5 API macros in them.
++echo
++echo "    Running trace script:"
++bin/trace src/H5*.c || exit 1
++
++# Run make_err
++# make_err automatically generates the H5E headers that create error message
++# types for HDF5.
++echo
++echo "    Running error generation script:"
++bin/make_err src/H5err.txt || exit 1
++
++# Run make_vers
++# make_vers automatically generates the public headers that define the API version
++# macros for HDF5.
++echo
++echo "    Running API version generation script:"
++bin/make_vers src/H5vers.txt || exit 1
++
++exit 0
+Index: bin/reconfigure.local
+===================================================================
+--- bin/reconfigure.local	(revision 0)
++++ bin/reconfigure.local	(revision 23771)
+@@ -0,0 +1,143 @@
++#! /bin/sh
++#
++# Copyright by the Board of Trustees of the University of Illinois.
++# All rights reserved.
++#
++# This file is part of HDF5.  The full HDF5 copyright notice, including
++# terms governing use, modification, and redistribution, is contained in
++# the files COPYING and Copyright.html.  COPYING can be found at the root
++# of the source code distribution tree; Copyright.html can be found at the
++# root level of an installed copy of the electronic HDF5 document set and
++# is linked from the top-level documents page.  It can also be found at
++# http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have
++# access to either file, you may request a copy from help@hdfgroup.org.
++#
++
++# A script to reconfigure autotools for HDF5, and to recreate other
++# generated files specific to HDF5.
++# If the paths of the autotools are not specified by the user, they
++# are hardcoded to point to their locations on HDF5 Linux machines.
++# Users can specify the locations of the autotools with the following
++# variables:
++# AUTOCONF, AUTOMAKE, ACLOCAL, AUTOHEADER, LIBTOOL and M4 should be the
++# paths to the corresponding tool executables; the script locates the
++# share/aclocal directory to pass to aclocal relative to ${LIBTOOL}.
++# Be very careful when specifying these tools manually!  Many different
++# versions can get confused (not even counting the m4 utility)!
++
++# HDF5 currently uses the following versions of the autotools:
++AUTOCONF_VERSION="autoconf (GNU Autoconf) 2.69"
++AUTOMAKE_VERSION="automake (GNU automake) 1.12.2"
++AUTOHEADER_VERSION="autoheader (GNU Autoconf) 2.69"
++ACLOCAL_VERSION="aclocal (GNU automake) 1.12.2"
++LIBTOOL_VERSION="(GNU libtool) 2.4.2"
++M4_VERSION="m4 (GNU M4) 1.4.16"
++
++#
++# When upgrading automake's version, don't forget to also update its
++# helper utilities, especially depcomp.
++
++
++# If paths to the autotools are not specified by the user, fall back to
++# the default system locations below.
++if test -z ${AUTOCONF}; then
++  AUTOCONF=/usr/bin/autoconf
++fi
++if test -z ${AUTOMAKE}; then
++  AUTOMAKE=/usr/bin/automake-1.12
++fi
++if test -z ${AUTOHEADER}; then
++  AUTOHEADER=/usr/bin/autoheader
++fi
++if test -z ${ACLOCAL}; then
++  ACLOCAL=/usr/bin/aclocal-1.12
++fi
++if test -z ${LIBTOOL}; then
++  LIBTOOL=/usr/bin/libtool
++fi
++if test -z ${M4}; then
++  M4=/usr/bin/m4
++fi
++
++# Check version numbers of all autotools against the "correct" versions
++AC_VERS=`${AUTOCONF} --version 2>&1 | grep "^${AUTOCONF_VERSION}"`
++if test -z "${AC_VERS}"; then
++   echo "${AUTOCONF} version is not ${AUTOCONF_VERSION}"
++   exit 1
++fi
++AM_VERS=`${AUTOMAKE} --version 2>&1 | grep "^${AUTOMAKE_VERSION}"`
++if test -z "${AM_VERS}"; then
++   echo "${AUTOMAKE} version is not ${AUTOMAKE_VERSION}"
++   exit 1
++fi
++AH_VERS=`${AUTOHEADER} --version 2>&1 | grep "^${AUTOHEADER_VERSION}"`
++if test -z "${AH_VERS}"; then
++   echo "${AUTOHEADER} version is not ${AUTOHEADER_VERSION}"
++   exit 1
++fi
++AL_VERS=`${ACLOCAL} --version 2>&1 | grep "^${ACLOCAL_VERSION}"`
++if test -z "${AL_VERS}"; then
++   echo "${ACLOCAL} version is not ${ACLOCAL_VERSION}"
++   exit 1
++fi
++LT_VERS=`${LIBTOOL} --version 2>&1 | grep "${LIBTOOL_VERSION}"`
++if test -z "${LT_VERS}"; then
++   echo "${LIBTOOL} version is not ${LIBTOOL_VERSION}"
++   exit 1
++fi
++M4_VERS=`${M4} --version 2>&1 | grep "${M4_VERSION}"`
++if test -z "${M4_VERS}"; then
++   echo "${M4} version is not ${M4_VERSION}"
++   exit 1
++fi
++
++# Make sure that the tools are in the path.
++AUTOCONF_DIR=`dirname ${AUTOCONF}`
++LIBTOOL_DIR=`dirname ${LIBTOOL}`
++M4_DIR=`dirname ${M4}`
++PATH=${AUTOCONF_DIR}:${M4_DIR}:$PATH
++
++# Run autoconf/automake commands in order
++  echo ${ACLOCAL} -I ${LIBTOOL_DIR}/../share/aclocal
++  ${ACLOCAL} -I ${LIBTOOL_DIR}/../share/aclocal || exit 1
++
++  echo ${AUTOHEADER}
++  ${AUTOHEADER} || exit 1
++
++  echo ${AUTOMAKE} --add-missing
++  ${AUTOMAKE} --add-missing || exit 1
++
++  echo ${AUTOCONF}
++  ${AUTOCONF} || exit 1
++
++# Clean up top-level Makefile.in
++# pmake wants an argument to be the first non-comment line it encounters
++# in the Makefile.  Automake wants to reorganize the Makefile.
++# To work around this, we post-process the top-level Makefile.in.
++  sed "s/^#xxx//" Makefile.in > Makefile.in.new
++  mv Makefile.in.new Makefile.in
++
++# Run trace script
++# The trace script adds H5TRACE macros to library source files.  It should
++# have no effect on files that don't have HDF5 API macros in them.
++echo
++echo "    Running trace script:"
++bin/trace src/H5*.c || exit 1
++
++# Run make_err
++# make_err automatically generates the H5E headers that create error message
++# types for HDF5.
++echo
++echo "    Running error generation script:"
++bin/make_err src/H5err.txt || exit 1
++
++# Run make_vers
++# make_vers automatically generates the public headers that define the API version
++# macros for HDF5.
++echo
++echo "    Running API version generation script:"
++bin/make_vers src/H5vers.txt || exit 1
++
++exit 0
+Index: perform/CMakeLists.txt
+===================================================================
+--- perform/CMakeLists.txt	(revision 23770)
++++ perform/CMakeLists.txt	(revision 23771)
+@@ -23,6 +23,7 @@
+ )
+ ADD_EXECUTABLE (h5perf_serial ${h5perf_serial_SRCS})
+ TARGET_NAMING (h5perf_serial ${LIB_TYPE})
++TARGET_C_PROPERTIES (h5perf_serial " " " ")
+ TARGET_LINK_LIBRARIES (h5perf_serial ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (h5perf_serial PROPERTIES FOLDER perform)
+ 
+@@ -38,6 +39,7 @@
+       APPEND PROPERTY COMPILE_DEFINITIONS STANDALONE
+   )
+   TARGET_NAMING (h5perf_serial_alone ${LIB_TYPE})
++  TARGET_C_PROPERTIES (h5perf_serial_alone " " " ")
+   TARGET_LINK_LIBRARIES (h5perf_serial_alone ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+   SET_TARGET_PROPERTIES (h5perf_serial_alone PROPERTIES FOLDER perform)
+ ENDIF (HDF5_BUILD_PERFORM_STANDALONE)
+@@ -48,6 +50,7 @@
+ )
+ ADD_EXECUTABLE(chunk ${chunk_SRCS})
+ TARGET_NAMING (chunk ${LIB_TYPE})
++TARGET_C_PROPERTIES (chunk " " " ")
+ TARGET_LINK_LIBRARIES(chunk ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (chunk PROPERTIES FOLDER perform)
+ 
+@@ -57,6 +60,7 @@
+ )
+ ADD_EXECUTABLE (iopipe ${iopipe_SRCS})
+ TARGET_NAMING (iopipe ${LIB_TYPE})
++TARGET_C_PROPERTIES (iopipe " " " ")
+ TARGET_LINK_LIBRARIES (iopipe ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (iopipe PROPERTIES FOLDER perform)
+ 
+@@ -66,6 +70,7 @@
+ )
+ ADD_EXECUTABLE (overhead ${overhead_SRCS})
+ TARGET_NAMING (overhead ${LIB_TYPE})
++TARGET_C_PROPERTIES (overhead " " " ")
+ TARGET_LINK_LIBRARIES (overhead ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET})
+ SET_TARGET_PROPERTIES (overhead PROPERTIES FOLDER perform)
+ 
+@@ -75,6 +80,7 @@
+ )
+ ADD_EXECUTABLE (perf_meta ${perf_meta_SRCS})
+ TARGET_NAMING (perf_meta ${LIB_TYPE})
++TARGET_C_PROPERTIES (perf_meta " " " ")
+ TARGET_LINK_LIBRARIES (perf_meta ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+ SET_TARGET_PROPERTIES (perf_meta PROPERTIES FOLDER perform)
+ 
+@@ -84,6 +90,7 @@
+ )
+ ADD_EXECUTABLE (zip_perf ${zip_perf_SRCS})
+ TARGET_NAMING (zip_perf ${LIB_TYPE})
++TARGET_C_PROPERTIES (zip_perf " " " ")
+ TARGET_LINK_LIBRARIES (zip_perf ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET})
+ SET_TARGET_PROPERTIES (zip_perf PROPERTIES FOLDER perform)
+ 
+@@ -96,6 +103,7 @@
+   )
+   ADD_EXECUTABLE (h5perf ${h5perf_SRCS})
+   TARGET_NAMING (h5perf ${LIB_TYPE})
++  TARGET_C_PROPERTIES (h5perf " " " ")
+   TARGET_LINK_LIBRARIES (h5perf ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+   SET_TARGET_PROPERTIES (h5perf PROPERTIES FOLDER perform)
+ 
+@@ -111,6 +119,7 @@
+         APPEND PROPERTY COMPILE_DEFINITIONS STANDALONE
+     )
+     TARGET_NAMING (h5perf_alone ${LIB_TYPE})
++    TARGET_C_PROPERTIES (h5perf_alone " " " ")
+     TARGET_LINK_LIBRARIES (h5perf_alone ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+     SET_TARGET_PROPERTIES (h5perf_alone PROPERTIES FOLDER perform)
+   ENDIF (HDF5_BUILD_PERFORM_STANDALONE)
+@@ -122,6 +131,7 @@
+     )
+     ADD_EXECUTABLE (benchpar ${benchpar_SRCS})
+     TARGET_NAMING (benchpar ${LIB_TYPE})
++    TARGET_C_PROPERTIES (benchpar " " " ")
+     TARGET_LINK_LIBRARIES (benchpar ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET})
+     SET_TARGET_PROPERTIES (benchpar PROPERTIES FOLDER perform)
+   ENDIF (HDF5_BUILD_PARALLEL_ALL)
+Index: fortran/test/CMakeLists.txt
+===================================================================
+--- fortran/test/CMakeLists.txt	(revision 23770)
++++ fortran/test/CMakeLists.txt	(revision 23771)
+@@ -10,6 +10,7 @@
+ # Add Test Lib
+ #-----------------------------------------------------------------------------
+ ADD_LIBRARY (${HDF5_F90_C_TEST_LIB_TARGET} ${LIB_TYPE} t.c)
++TARGET_C_PROPERTIES (${HDF5_F90_C_TEST_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_F90_C_TEST_LIB_TARGET}
+     ${HDF5_F90_C_LIB_TARGET}
+     ${HDF5_TEST_LIB_TARGET}
+@@ -28,7 +29,7 @@
+   ENDIF (BUILD_SHARED_LIBS)
+   SET_PROPERTY (TARGET ${HDF5_F90_TEST_LIB_TARGET} APPEND PROPERTY COMPILE_DEFINITIONS HDF5F90_WINDOWS)
+ ENDIF (WIN32 AND NOT CYGWIN)
+-TARGET_FORTRAN_WIN_PROPERTIES (${HDF5_F90_TEST_LIB_TARGET} ${SHARED_LINK_FLAGS})
++TARGET_FORTRAN_PROPERTIES (${HDF5_F90_TEST_LIB_TARGET} " " ${SHARED_LINK_FLAGS})
+ SET_TARGET_PROPERTIES (${HDF5_F90_TEST_LIB_TARGET} PROPERTIES LINKER_LANGUAGE Fortran)
+ TARGET_LINK_LIBRARIES (${HDF5_F90_TEST_LIB_TARGET}
+     ${HDF5_F90_C_TEST_LIB_TARGET}
+@@ -60,7 +61,7 @@
+     tH5Z.f90
+ )
+ TARGET_NAMING (testhdf5_fortran ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (testhdf5_fortran "")
++TARGET_FORTRAN_PROPERTIES (testhdf5_fortran " " " ")
+ TARGET_LINK_LIBRARIES (testhdf5_fortran 
+     ${HDF5_F90_TEST_LIB_TARGET}
+     ${HDF5_F90_LIB_TARGET}
+@@ -84,7 +85,7 @@
+     tH5G_1_8.f90
+ )
+ TARGET_NAMING (testhdf5_fortran_1_8 ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (testhdf5_fortran_1_8 "")
++TARGET_FORTRAN_PROPERTIES (testhdf5_fortran_1_8 " " " ")
+ TARGET_LINK_LIBRARIES (testhdf5_fortran_1_8 
+     ${HDF5_F90_TEST_LIB_TARGET}
+     ${HDF5_F90_LIB_TARGET}
+@@ -112,7 +113,7 @@
+       tH5T_F03.f90
+   )
+   TARGET_NAMING (fortranlib_test_F03 ${LIB_TYPE})
+-  TARGET_FORTRAN_WIN_PROPERTIES (fortranlib_test_F03 "")
++  TARGET_FORTRAN_PROPERTIES (fortranlib_test_F03 " " " ")
+   TARGET_LINK_LIBRARIES (fortranlib_test_F03 
+       ${HDF5_F90_TEST_LIB_TARGET}
+       ${HDF5_F90_LIB_TARGET}
+@@ -131,7 +132,7 @@
+ #-- Adding test for fflush1
+ ADD_EXECUTABLE (fflush1 fflush1.f90)
+ TARGET_NAMING (fflush1 ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (fflush1 "")
++TARGET_FORTRAN_PROPERTIES (fflush1 " " " ")
+ TARGET_LINK_LIBRARIES (fflush1 
+     ${HDF5_F90_LIB_TARGET}
+     ${HDF5_F90_TEST_LIB_TARGET}
+@@ -148,7 +149,7 @@
+ #-- Adding test for fflush2
+ ADD_EXECUTABLE (fflush2 fflush2.f90)
+ TARGET_NAMING (fflush2 ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (fflush2 "")
++TARGET_FORTRAN_PROPERTIES (fflush2 " " " ")
+ TARGET_LINK_LIBRARIES (fflush2 
+     ${HDF5_F90_TEST_LIB_TARGET}
+     ${HDF5_F90_LIB_TARGET}
+Index: fortran/testpar/CMakeLists.txt
+===================================================================
+--- fortran/testpar/CMakeLists.txt	(revision 23770)
++++ fortran/testpar/CMakeLists.txt	(revision 23771)
+@@ -17,7 +17,7 @@
+     mdset.f90
+ )
+ TARGET_NAMING (parallel_test ${LIB_TYPE})
+-TARGET_FORTRAN_WIN_PROPERTIES (parallel_test "")
++TARGET_FORTRAN_PROPERTIES (parallel_test " " " ")
+ TARGET_LINK_LIBRARIES (parallel_test 
+     ${HDF5_F90_TEST_LIB_TARGET}
+     ${HDF5_F90_LIB_TARGET}
+Index: fortran/src/CMakeLists.txt
+===================================================================
+--- fortran/src/CMakeLists.txt	(revision 23770)
++++ fortran/src/CMakeLists.txt	(revision 23771)
+@@ -133,6 +133,7 @@
+ )
+ 
+ ADD_LIBRARY (${HDF5_F90_C_LIB_TARGET} ${LIB_TYPE} ${f90CStub_C_SRCS} ${f90CStub_C_HDRS})
++TARGET_C_PROPERTIES (${HDF5_F90_C_LIB_TARGET} " " " ")
+ TARGET_LINK_LIBRARIES (${HDF5_F90_C_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_LIBS})
+ SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_F90_C_LIB_TARGET}")
+ H5_SET_LIB_OPTIONS (${HDF5_F90_C_LIB_TARGET} ${HDF5_F90_C_LIB_NAME} ${LIB_TYPE})
+@@ -233,7 +234,7 @@
+         HDF5F90_WINDOWS
+   )
+ ENDIF (WIN32 AND NOT CYGWIN)
+-TARGET_FORTRAN_WIN_PROPERTIES (${HDF5_F90_LIB_TARGET} ${SHARED_LINK_FLAGS})
++TARGET_FORTRAN_PROPERTIES (${HDF5_F90_LIB_TARGET} " " ${SHARED_LINK_FLAGS})
+ SET_TARGET_PROPERTIES (${HDF5_F90_LIB_TARGET} PROPERTIES LINKER_LANGUAGE Fortran)
+ TARGET_LINK_LIBRARIES (${HDF5_F90_LIB_TARGET} ${HDF5_F90_C_LIB_TARGET} ${HDF5_LIB_TARGET})
+ IF (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND)
+Index: fortran/examples/CMakeLists.txt
+===================================================================
+--- fortran/examples/CMakeLists.txt	(revision 23770)
++++ fortran/examples/CMakeLists.txt	(revision 23771)
+@@ -41,7 +41,7 @@
+ FOREACH (example ${examples})
+   ADD_EXECUTABLE (f90_ex_${example} ${HDF5_F90_EXAMPLES_SOURCE_DIR}/${example}.f90)
+   TARGET_NAMING (f90_ex_${example} ${LIB_TYPE})
+-  TARGET_FORTRAN_WIN_PROPERTIES (f90_ex_${example} "")
++  TARGET_FORTRAN_PROPERTIES (f90_ex_${example} " " " ")
+   IF (WIN32 AND NOT CYGWIN)
+     SET_PROPERTY (TARGET f90_ex_${example} 
+         APPEND PROPERTY COMPILE_DEFINITIONS 
+@@ -69,7 +69,7 @@
+   FOREACH (example ${F2003_examples})
+     ADD_EXECUTABLE (f03_ex_${example} ${HDF5_F90_EXAMPLES_SOURCE_DIR}/${example}.f90)
+     TARGET_NAMING (f03_ex_${example} ${LIB_TYPE})
+-    TARGET_FORTRAN_WIN_PROPERTIES (f03_ex_${example} "")
++    TARGET_FORTRAN_PROPERTIES (f03_ex_${example} " " " ")
+     IF (WIN32 AND NOT CYGWIN)
+       SET_PROPERTY (TARGET f03_ex_${example} 
+           APPEND PROPERTY COMPILE_DEFINITIONS HDF5F90_WINDOWS
+@@ -95,7 +95,7 @@
+ IF (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND)
+   ADD_EXECUTABLE (f90_ex_ph5example ${HDF5_F90_EXAMPLES_SOURCE_DIR}/ph5example.f90)
+   TARGET_NAMING (f90_ex_ph5example ${LIB_TYPE})
+-  TARGET_FORTRAN_WIN_PROPERTIES (f90_ex_ph5example "")
++  TARGET_FORTRAN_PROPERTIES (f90_ex_ph5example " " " ")
+   IF (WIN32 AND NOT CYGWIN)
+     SET_PROPERTY (TARGET f90_ex_ph5example 
+         APPEND PROPERTY COMPILE_DEFINITIONS 
+Index: CMakeLists.txt
+===================================================================
+--- CMakeLists.txt	(revision 23770)
++++ CMakeLists.txt	(revision 23771)
+@@ -176,7 +176,7 @@
+     "\\1" H5_VERS_MINOR ${_h5public_h_contents})
+ STRING (REGEX REPLACE ".*#define[ \t]+H5_VERS_RELEASE[ \t]+([0-9]*).*$"
+     "\\1" H5_VERS_RELEASE ${_h5public_h_contents})
+-STRING (REGEX REPLACE ".*#define[ \t]+H5_VERS_SUBRELEASE[ \t]+\"([0-9A-Za-z.]*)\".*$"
++STRING (REGEX REPLACE ".*#define[ \t]+H5_VERS_SUBRELEASE[ \t]+\"([0-9A-Za-z._]*)\".*$"
+     "\\1" H5_VERS_SUBRELEASE ${_h5public_h_contents})
+ #MESSAGE (STATUS "VERSION: ${H5_VERS_MAJOR}.${H5_VERS_MINOR}.${H5_VERS_RELEASE}-${H5_VERS_SUBRELEASE}")
+ 
+@@ -190,7 +190,7 @@
+     "\\1" H5_SOVERS_MINOR ${_lt_vers_am_contents})
+ STRING (REGEX REPLACE ".*LT_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
+     "\\1" H5_SOVERS_RELEASE ${_lt_vers_am_contents})
+-MESSAGE (STATUS "SOVERSION: ${H5_SOVERS_MAJOR}.${H5_SOVERS_MINOR}.${H5_SOVERS_RELEASE}")
++MESSAGE (STATUS "SOVERSION: ${H5_SOVERS_MAJOR}.${H5_SOVERS_RELEASE}.${H5_SOVERS_MINOR}")
+ 
+ #-----------------------------------------------------------------------------
+ # Basic HDF5 stuff here
+@@ -205,7 +205,7 @@
+ ELSE (NOT "${H5_VERS_SUBRELEASE}" STREQUAL "")
+   SET (HDF5_PACKAGE_VERSION_STRING "${HDF5_PACKAGE_VERSION}")
+ ENDIF (NOT "${H5_VERS_SUBRELEASE}" STREQUAL "")
+-SET (HDF5_PACKAGE_SOVERSION "${H5_SOVERS_MAJOR}.${H5_SOVERS_MINOR}.${H5_SOVERS_RELEASE}")
++SET (HDF5_PACKAGE_SOVERSION "${H5_SOVERS_MAJOR}.${H5_SOVERS_RELEASE}.${H5_SOVERS_MINOR}")
+ SET (HDF5_PACKAGE_STRING "${HDF5_PACKAGE_NAME} ${HDF5_PACKAGE_VERSION_STRING}")
+ SET (HDF5_PACKAGE_TARNAME "${HDF5_PACKAGE}${HDF_PACKAGE_EXT}")
+ SET (HDF5_PACKAGE_URL "http://www.hdfgroup.org")
+@@ -358,6 +358,8 @@
+ 
+ IF (MSVC)
+   SET (CMAKE_MFC_FLAG 0)
++  SET (WIN_COMPILE_FLAGS "/MD")
++  SET (WIN_LINK_FLAGS "")
+ ENDIF (MSVC)
+ 
+ SET (MAKE_SYSTEM)
+@@ -519,6 +521,11 @@
+ ENDIF (HDF5_USE_16_API_DEFAULT)
+ 
+ #-----------------------------------------------------------------------------
++# Include user macros
++#-----------------------------------------------------------------------------
++INCLUDE (UserMacros.cmake)
++
++#-----------------------------------------------------------------------------
+ # Options for HDF5 Filters
+ #-----------------------------------------------------------------------------
+ MACRO (HDF5_SETUP_FILTERS FILTER)
+Index: examples/CMakeLists.txt
+===================================================================
+--- examples/CMakeLists.txt	(revision 23770)
++++ examples/CMakeLists.txt	(revision 23771)
+@@ -39,6 +39,7 @@
+ FOREACH (example ${examples})
+   ADD_EXECUTABLE (${example} ${HDF5_EXAMPLES_SOURCE_DIR}/${example}.c)
+   TARGET_NAMING (${example} ${LIB_TYPE})
++  TARGET_C_PROPERTIES (${example} " " " ")
+   TARGET_LINK_LIBRARIES (${example} ${HDF5_LIB_TARGET})
+   SET_TARGET_PROPERTIES (${example} PROPERTIES FOLDER examples)
+ ENDFOREACH (example ${examples})
+@@ -46,6 +47,7 @@
+ IF (H5_HAVE_PARALLEL)
+   ADD_EXECUTABLE (ph5example ${HDF5_EXAMPLES_SOURCE_DIR}/ph5example.c)
+   TARGET_NAMING (ph5example ${LIB_TYPE})
++  TARGET_C_PROPERTIES (ph5example " " " ")
+   TARGET_LINK_LIBRARIES (ph5example ${HDF5_LIB_TARGET})
+   SET_TARGET_PROPERTIES (ph5example PROPERTIES FOLDER examples)
+ ENDIF (H5_HAVE_PARALLEL)
diff --git a/source/c/jhdf5/exceptionImpJHDF5.c b/source/c/jhdf5/exceptionImpJHDF5.c
new file mode 100755
index 0000000..8b80714
--- /dev/null
+++ b/source/c/jhdf5/exceptionImpJHDF5.c
@@ -0,0 +1,769 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This is a utility module used by the HDF Java-C wrapper layer to
+ *  generate exceptions.  This may be called from any part of the
+ *  Java-C interface.
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <stdio.h>
+#include "jni.h"
+/*
+#include "H5Eprivate.h"
+*/
+/*  These types are copied from H5Eprivate.h
+ *  They should be moved to a public include file, and deleted from
+ *  here.
+ */
+#define H5E_NSLOTS      32      /*number of slots in an error stack */
+/*
+* The list of error messages in the system is kept as an array of
+* error_code/message pairs, one for major error numbers and another for
+* minor error numbers.
+*/
+typedef struct H5E_major_mesg_t {
+    H5E_major_t error_code;
+    const char  *str;
+} H5E_major_mesg_t;
+
+typedef struct H5E_minor_mesg_t {
+    H5E_minor_t error_code;
+    const char  *str;
+} H5E_minor_mesg_t;
+
+/* major and minor error numbers */
+typedef struct H5E_num_t {
+    int maj_num;
+    int min_num;
+} H5E_num_t;
+
+int getMajorErrorNumber();
+int getMinorErrorNumber();
+
+/* get the major and minor error numbers from the top of the error stack */
+static
+herr_t walk_error_callback(unsigned n, const H5E_error_t *err_desc, void *_err_nums)
+{
+    H5E_num_t *err_nums = (H5E_num_t *)_err_nums;
+
+    if (err_desc) {
+        err_nums->maj_num = err_desc->maj_num;
+        err_nums->min_num = err_desc->min_num;
+    }
+
+    return 0;
+}
+
+
+char *defineHDF5LibraryException(int maj_num);
+
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    H5error_off
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5error_1off
+  (JNIEnv *env, jclass clss )
+{
+    return H5Eset_auto(H5E_DEFAULT, NULL, NULL);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5error_off
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_H5_H5error_1off
+  (JNIEnv *env, jclass clss )
+{
+    return H5Eset_auto(H5E_DEFAULT, NULL, NULL);
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method:    printStackTrace0
+ * Signature: (Ljava/lang/Object;)V
+ *
+ *  Call the HDF-5 library to print the HDF-5 error stack to 'file_name'.
+ */
+JNIEXPORT void JNICALL Java_ncsa_hdf_hdf5lib_exceptions_HDF5LibraryException_printStackTrace0
+  (JNIEnv *env, jobject obj, jstring file_name)
+{
+    FILE *stream;
+    char *file;
+
+    if (file_name == NULL)
+        H5Eprint(H5E_DEFAULT, stderr);
+    else
+    {
+#ifdef __cplusplus
+        file = (char *)env->GetStringUTFChars(file_name,0);
+#else
+        file = (char *)(*env)->GetStringUTFChars(env,file_name,0);
+#endif
+        stream = fopen(file, "a+");
+        H5Eprint(H5E_DEFAULT, stream);
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(file_name, file);
+#else
+        (*env)->ReleaseStringUTFChars(env, file_name, file);
+#endif
+        if (stream) fclose(stream);
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method:    getMajorErrorNumber
+ * Signature: ()I
+ *
+ *  Extract the HDF-5 major error number from the HDF-5 error stack.
+ *
+ *  Note:  This relies on undocumented, 'private' code in the HDF-5
+ *  library.  Later releases will have a public interface for this
+ *  purpose.
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_exceptions_HDF5LibraryException_getMajorErrorNumber
+  (JNIEnv *env, jobject obj)
+{
+    H5E_num_t err_nums;
+
+    H5Ewalk(H5E_DEFAULT, H5E_WALK_DOWNWARD, walk_error_callback, &err_nums);
+
+    return (int) err_nums.maj_num;
+}
+
+int getMajorErrorNumber()
+{
+    H5E_num_t err_nums;
+
+    H5Ewalk(H5E_DEFAULT, H5E_WALK_DOWNWARD, walk_error_callback, &err_nums);
+
+    return (int) err_nums.maj_num;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method:    getMinorErrorNumber
+ * Signature: ()I
+ *
+ *  Extract the HDF-5 minor error number from the HDF-5 error stack.
+ *
+ *  Note:  This relies on undocumented, 'private' code in the HDF-5
+ *  library.  Later releases will have a public interface for this
+ *  purpose.
+ */
+JNIEXPORT jint JNICALL Java_ncsa_hdf_hdf5lib_exceptions_HDF5LibraryException_getMinorErrorNumber
+  (JNIEnv *env, jobject obj)
+{
+    return (jint) getMinorErrorNumber();
+}
+
+int getMinorErrorNumber()
+{
+    H5E_num_t err_nums;
+
+    H5Ewalk(H5E_DEFAULT, H5E_WALK_DOWNWARD, walk_error_callback, &err_nums);
+
+    return (int) err_nums.min_num;
+}
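+
+/*
+ *  A minimal usage sketch (an illustration, not part of the wrapper API):
+ *  how a caller inside this translation unit might capture the major/minor
+ *  error pair after a failed HDF5 call and print the matching messages,
+ *  mirroring this file's own use of H5Eget_major/H5Eget_minor.
+ */
+static void example_report_last_error(void)
+{
+    /* Each getter walks the default error stack once via H5Ewalk. */
+    int maj = getMajorErrorNumber();
+    int min = getMinorErrorNumber();
+
+    fprintf(stderr, "HDF5 error: major=%d (%s), minor=%d (%s)\n",
+            maj, H5Eget_major((H5E_major_t) maj),
+            min, H5Eget_minor((H5E_minor_t) min));
+}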
+
+/*
+ *  Routine to raise particular Java exceptions from C
+ */
+
+/*
+ *  Create and throw an 'OutOfMemoryError'
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5outOfMemory( JNIEnv *env, char *functName)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass("java/lang/OutOfMemoryError");
+#else
+    jc = (*env)->FindClass(env, "java/lang/OutOfMemoryError");
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = (env)->NewStringUTF(functName);
+#else
+    str = (*env)->NewStringUTF(env,functName);
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, (jvalue *)args );
+
+    rval = env->Throw( (jthrowable ) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  OutOfMemoryError: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
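+
+/*
+ *  A minimal usage sketch, with a hypothetical native method name chosen
+ *  purely for illustration: when GetByteArrayElements cannot pin the
+ *  array, the failure is surfaced through h5outOfMemory() and the native
+ *  method returns at once so the pending error can propagate to Java.
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_exampleUseBytes
+  (JNIEnv *env, jclass clss, jbyteArray buf)
+{
+    jbyte *bytes;
+
+#ifdef __cplusplus
+    bytes = env->GetByteArrayElements(buf, NULL);
+#else
+    bytes = (*env)->GetByteArrayElements(env, buf, NULL);
+#endif
+    if (bytes == NULL) {
+        h5outOfMemory(env, "exampleUseBytes: buf not pinned");
+        return -1;
+    }
+    /* ... work with 'bytes' here ... */
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(buf, bytes, 0);
+#else
+    (*env)->ReleaseByteArrayElements(env, buf, bytes, 0);
+#endif
+    return 0;
+}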
+
+
+/*
+ *  A fatal error in a JNI call
+ *  Create and throw an 'InternalError'
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5JNIFatalError( JNIEnv *env, char *functName)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass("java/lang/InternalError");
+#else
+    jc = (*env)->FindClass(env, "java/lang/InternalError");
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = env->NewStringUTF(functName);
+#else
+    str = (*env)->NewStringUTF(env,functName);
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, (jvalue *)args );
+
+    rval = env->Throw( (jthrowable) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  JNIFatal: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
+
+/*
+ *  A NULL argument in an HDF5 call
+ *  Create and throw a 'NullPointerException'
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5nullArgument( JNIEnv *env, char *functName)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass("java/lang/NullPointerException");
+#else
+    jc = (*env)->FindClass(env, "java/lang/NullPointerException");
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = env->NewStringUTF(functName);
+#else
+    str = (*env)->NewStringUTF(env,functName);
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, (jvalue *)args );
+
+    rval = env->Throw((jthrowable) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  NullPointer: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
+
+/*
+ *  A bad argument in an HDF5 call
+ *  Create and throw an 'IllegalArgumentException'
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5badArgument( JNIEnv *env, char *functName)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass("java/lang/IllegalArgumentException");
+#else
+    jc = (*env)->FindClass(env, "java/lang/IllegalArgumentException");
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = env->NewStringUTF(functName);
+#else
+    str = (*env)->NewStringUTF(env,functName);
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, (jvalue *)args );
+
+    rval = env->Throw((jthrowable) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  BadArgument: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
+
+/*
+ *  A feature that is not implemented yet
+ *  Create and throw an 'UnsupportedOperationException'
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5unimplemented( JNIEnv *env, char *functName)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass("java/lang/UnsupportedOperationException");
+#else
+    jc = (*env)->FindClass(env, "java/lang/UnsupportedOperationException");
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = env->NewStringUTF(functName);
+#else
+    str = (*env)->NewStringUTF(env,functName);
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, (jvalue *)args );
+
+    rval = env->Throw((jthrowable) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  Unsupported: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
+
+/*
+ *  h5libraryError()   determines the HDF-5 major error code
+ *  and creates and throws the appropriate sub-class of
+ *  HDF5LibraryException().  This routine should be called
+ *  whenever a call to the HDF-5 library fails, i.e., when
+ *  a negative value is returned.
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5libraryError( JNIEnv *env )
+{
+    jmethodID jm;
+    jclass jc;
+    jvalue args[4];
+    char *exception;
+    jobject ex;
+    jstring min_msg_str, maj_msg_str;
+    char *min_msg, *maj_msg;
+    int rval, min_num, maj_num;
+
+    maj_num = (int)getMajorErrorNumber();
+    maj_msg = (char *)H5Eget_major((H5E_major_t)maj_num);
+    exception = (char *)defineHDF5LibraryException(maj_num);
+
+#ifdef __cplusplus
+    jc = env->FindClass(exception);
+#else
+    jc = (*env)->FindClass(env, exception);
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(ILjava/lang/String;ILjava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(ILjava/lang/String;ILjava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        fprintf(stderr, "FATAL ERROR:  h5libraryError: Cannot find constructor\n");
+        return JNI_FALSE;
+    }
+
+    min_num = (int)getMinorErrorNumber();
+    min_msg = (char *)H5Eget_minor((H5E_minor_t)min_num);
+#ifdef __cplusplus
+    maj_msg_str = env->NewStringUTF(maj_msg);
+    min_msg_str = env->NewStringUTF(min_msg);
+#else
+    maj_msg_str = (*env)->NewStringUTF(env,maj_msg);
+    min_msg_str = (*env)->NewStringUTF(env,min_msg);
+#endif
+    if (maj_msg_str == NULL || min_msg_str == NULL)
+    {
+        fprintf(stderr, "FATAL ERROR: h5libraryError: Out of Memory\n");
+        return JNI_FALSE;
+    }
+
+    args[0].i = maj_num;
+    args[1].l = maj_msg_str;
+    args[2].i = min_num;
+    args[3].l = min_msg_str;
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, args );
+
+    rval = env->Throw((jthrowable) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  h5libraryError: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
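+
+/*
+ *  A minimal usage sketch showing the conventional call pattern (the
+ *  method name exampleCloseFile is an illustrative assumption, not a real
+ *  method of this library): any negative HDF5 return value is translated
+ *  into the matching HDF5LibraryException sub-class via h5libraryError().
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_exampleCloseFile
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    herr_t status = H5Fclose((hid_t) file_id);
+
+    if (status < 0) {
+        /* Reads the error stack, chooses the sub-class, leaves it pending. */
+        h5libraryError(env);
+    }
+    return (jint) status;
+}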
+
+
+/*
+ *  A Java constant that has no J2C mapping raises an 'IllegalArgumentException'.
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5illegalConstantError(JNIEnv *env)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass("java/lang/IllegalArgumentException");
+#else
+    jc = (*env)->FindClass(env, "java/lang/IllegalArgumentException");
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = env->NewStringUTF("Illegal java constant");
+#else
+    str = (*env)->NewStringUTF(env,"Illegal java constant");
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+#ifdef __cplusplus
+    ex = env->NewObjectA ( jc, jm, (jvalue *)args );
+
+    rval = env->Throw((jthrowable) ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  Unsupported: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
+
+/*  h5raiseException().  This routine is called to generate
+ *  an arbitrary Java exception with a particular message.
+ *
+ *  Note:  JNI 'Throw' only makes the exception pending; it is raised
+ *  in Java when the native method returns, so callers should return
+ *  promptly after calling this routine.
+ */
+jboolean h5raiseException( JNIEnv *env, char *exception, char *message)
+{
+    jmethodID jm;
+    jclass jc;
+    char * args[2];
+    jobject ex;
+    jstring str;
+    int rval;
+
+#ifdef __cplusplus
+    jc = env->FindClass(exception);
+#else
+    jc = (*env)->FindClass(env, exception);
+#endif
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+#ifdef __cplusplus
+    jm = env->GetMethodID(jc, "<init>", "(Ljava/lang/String;)V");
+#else
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(Ljava/lang/String;)V");
+#endif
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    str = env->NewStringUTF(message);
+#else
+    str = (*env)->NewStringUTF(env,message);
+#endif
+    args[0] = (char *)str;
+    args[1] = 0;
+#ifdef __cplusplus
+    ex = env->NewObjectA (  jc, jm, (jvalue *)args );
+
+    rval = env->Throw( (jthrowable)ex );
+#else
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+#endif
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  raiseException: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
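+
+/*
+ *  A minimal usage sketch (the helper below is illustrative only): raising
+ *  an arbitrary exception class, addressed by its JNI name, with a custom
+ *  message.
+ */
+static void example_reject_rank(JNIEnv *env, int rank)
+{
+    if (rank > H5S_MAX_RANK) {
+        h5raiseException(env,
+                         "java/lang/IllegalArgumentException",
+                         "example_reject_rank: rank exceeds H5S_MAX_RANK");
+    }
+}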
+
+/*
+jboolean buildException( JNIEnv *env, char *exception, jint HDFerr)
+{
+    jmethodID jm;
+    jclass jc;
+    int args[2];
+    jobject ex;
+    int rval;
+
+
+    jc = (*env)->FindClass(env, exception);
+    if (jc == NULL) {
+        return JNI_FALSE;
+    }
+    jm = (*env)->GetMethodID(env, jc, "<init>", "(I)V");
+    if (jm == NULL) {
+        return JNI_FALSE;
+    }
+    args[0] = HDFerr;
+    args[1] = 0;
+
+    ex = (*env)->NewObjectA ( env, jc, jm, (jvalue *)args );
+
+    rval = (*env)->Throw(env, ex );
+    if (rval < 0) {
+        fprintf(stderr, "FATAL ERROR:  raiseException: Throw failed\n");
+        return JNI_FALSE;
+    }
+
+    return JNI_TRUE;
+}
+*/
+
+/*
+ *  defineHDF5LibraryException()  returns the name of the sub-class
+ *  which goes with an HDF-5 error code.
+ */
+char *defineHDF5LibraryException(int maj_num)
+{
+    H5E_major_t err_num = (H5E_major_t) maj_num;
+
+    if (err_num == H5E_ARGS)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException";
+    else if (err_num == H5E_RESOURCE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException";
+    else if (err_num == H5E_INTERNAL)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5InternalErrorException";
+    else if (err_num == H5E_FILE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5FileInterfaceException";
+    else if (err_num == H5E_IO)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5LowLevelIOException";
+    else if (err_num == H5E_FUNC)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException";
+    else if (err_num == H5E_ATOM)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5AtomException";
+    else if (err_num == H5E_CACHE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException";
+    else if (err_num == H5E_BTREE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5BtreeException";
+    else if (err_num == H5E_SYM)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5SymbolTableException";
+    else if (err_num == H5E_HEAP)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5HeapException";
+    else if (err_num == H5E_OHDR)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException";
+    else if (err_num == H5E_DATATYPE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException";
+    else if (err_num == H5E_DATASPACE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException";
+    else if (err_num == H5E_DATASET)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException";
+    else if (err_num == H5E_STORAGE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5DataStorageException";
+    else if (err_num == H5E_PLIST)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException";
+    else if (err_num == H5E_ATTR)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5AttributeException";
+    else if (err_num == H5E_PLINE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5DataFiltersException";
+    else if (err_num == H5E_EFL)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5ExternalFileListException";
+    else if (err_num == H5E_REFERENCE)
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5ReferenceException";
+    else
+        return "ncsa/hdf/hdf5lib/exceptions/HDF5LibraryException";
+
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5ConstantsJHDF5.c b/source/c/jhdf5/h5ConstantsJHDF5.c
new file mode 100755
index 0000000..612b64b
--- /dev/null
+++ b/source/c/jhdf5/h5ConstantsJHDF5.c
@@ -0,0 +1,536 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include "h5ConstantsJHDF5.h"
+#include <jni.h>
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    J2C -- converts Java constants defined in
+ *            ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants to HDF5
+ *            runtime global variables.
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_J2C
+  (JNIEnv *env, jclass clss, jint java_constant)
+{
+    switch (java_constant)
+    {
+        case JH5_SZIP_MAX_PIXELS_PER_BLOCK  : return  H5_SZIP_MAX_PIXELS_PER_BLOCK;
+        case JH5_SZIP_NN_OPTION_MASK  : return  H5_SZIP_NN_OPTION_MASK;
+        case JH5_SZIP_EC_OPTION_MASK  : return  H5_SZIP_EC_OPTION_MASK;
+        case JH5_SZIP_ALLOW_K13_OPTION_MASK  : return  H5_SZIP_ALLOW_K13_OPTION_MASK;
+        case JH5_SZIP_CHIP_OPTION_MASK  : return  H5_SZIP_CHIP_OPTION_MASK;
+        case JH5D_ALLOC_TIME_DEFAULT  : return  H5D_ALLOC_TIME_DEFAULT;
+        case JH5D_ALLOC_TIME_EARLY  : return  H5D_ALLOC_TIME_EARLY;
+        case JH5D_ALLOC_TIME_ERROR  : return  H5D_ALLOC_TIME_ERROR;
+        case JH5D_ALLOC_TIME_INCR  : return  H5D_ALLOC_TIME_INCR;
+        case JH5D_ALLOC_TIME_LATE  : return  H5D_ALLOC_TIME_LATE;
+        case JH5D_CHUNKED  : return  H5D_CHUNKED;
+        case JH5D_COMPACT  : return  H5D_COMPACT;
+        case JH5D_CONTIGUOUS  : return  H5D_CONTIGUOUS;
+        case JH5D_FILL_TIME_ALLOC  : return  H5D_FILL_TIME_ALLOC;
+        case JH5D_FILL_TIME_ERROR  : return  H5D_FILL_TIME_ERROR;
+        case JH5D_FILL_TIME_NEVER  : return  H5D_FILL_TIME_NEVER;
+        case JH5D_FILL_VALUE_DEFAULT  : return  H5D_FILL_VALUE_DEFAULT;
+        case JH5D_FILL_VALUE_ERROR  : return  H5D_FILL_VALUE_ERROR;
+        case JH5D_FILL_VALUE_UNDEFINED  : return  H5D_FILL_VALUE_UNDEFINED;
+        case JH5D_FILL_VALUE_USER_DEFINED  : return  H5D_FILL_VALUE_USER_DEFINED;
+        case JH5D_LAYOUT_ERROR  : return  H5D_LAYOUT_ERROR;
+        case JH5D_NLAYOUTS  : return  H5D_NLAYOUTS;
+        case JH5D_SPACE_STATUS_ALLOCATED  : return  H5D_SPACE_STATUS_ALLOCATED;
+        case JH5D_SPACE_STATUS_ERROR  : return  H5D_SPACE_STATUS_ERROR;
+        case JH5D_SPACE_STATUS_NOT_ALLOCATED  : return  H5D_SPACE_STATUS_NOT_ALLOCATED;
+        case JH5D_SPACE_STATUS_PART_ALLOCATED  : return  H5D_SPACE_STATUS_PART_ALLOCATED;
+        case JH5E_ALIGNMENT  : return  H5E_ALIGNMENT;
+        case JH5E_ALREADYEXISTS  : return  H5E_ALREADYEXISTS;
+        case JH5E_ALREADYINIT  : return  H5E_ALREADYINIT;
+        case JH5E_ARGS  : return  H5E_ARGS;
+        case JH5E_ATOM  : return  H5E_ATOM;
+        case JH5E_ATTR  : return  H5E_ATTR;
+        case JH5E_BADATOM  : return  H5E_BADATOM;
+        case JH5E_BADFILE  : return  H5E_BADFILE;
+        case JH5E_BADGROUP  : return  H5E_BADGROUP;
+        case JH5E_BADMESG  : return  H5E_BADMESG;
+        case JH5E_BADRANGE  : return  H5E_BADRANGE;
+        case JH5E_BADSELECT  : return  H5E_BADSELECT;
+        case JH5E_BADSIZE  : return  H5E_BADSIZE;
+        case JH5E_BADTYPE  : return  H5E_BADTYPE;
+        case JH5E_BADVALUE  : return  H5E_BADVALUE;
+        case JH5E_BTREE  : return  H5E_BTREE;
+        case JH5E_CACHE  : return  H5E_CACHE;
+        case JH5E_CALLBACK  : return  H5E_CALLBACK;
+        case JH5E_CANAPPLY  : return  H5E_CANAPPLY;
+        /*case JH5E_CANTALLOC  : return  H5E_CANTALLOC; 
+        case JH5E_CANTCHANGE  : return  H5E_CANTCHANGE; removed from 1.6.4*/
+        case JH5E_CANTCLIP  : return  H5E_CANTCLIP;
+        case JH5E_CANTCLOSEFILE  : return  H5E_CANTCLOSEFILE;
+        case JH5E_CANTCONVERT  : return  H5E_CANTCONVERT;
+        case JH5E_CANTCOPY  : return  H5E_CANTCOPY;
+        case JH5E_CANTCOUNT  : return  H5E_CANTCOUNT;
+        case JH5E_CANTCREATE  : return  H5E_CANTCREATE;
+        case JH5E_CANTDEC  : return  H5E_CANTDEC;
+        case JH5E_CANTDECODE  : return  H5E_CANTDECODE;
+        case JH5E_CANTDELETE  : return  H5E_CANTDELETE;
+        case JH5E_CANTENCODE  : return  H5E_CANTENCODE;
+        case JH5E_CANTFLUSH  : return  H5E_CANTFLUSH;
+        case JH5E_CANTFREE  : return  H5E_CANTFREE;
+        case JH5E_CANTGET  : return  H5E_CANTGET;
+        case JH5E_CANTINC  : return  H5E_CANTINC;
+        case JH5E_CANTINIT  : return  H5E_CANTINIT;
+        case JH5E_CANTINSERT  : return  H5E_CANTINSERT;
+        case JH5E_CANTLIST  : return  H5E_CANTLIST;
+        case JH5E_CANTLOAD  : return  H5E_CANTLOAD;
+        case JH5E_CANTLOCK  : return  H5E_CANTLOCK;
+        /*case JH5E_CANTMAKETREE  : return  H5E_CANTMAKETREE; removed from 1.8.0*/
+        case JH5E_CANTNEXT  : return  H5E_CANTNEXT;
+        case JH5E_CANTOPENFILE  : return  H5E_CANTOPENFILE;
+        case JH5E_CANTOPENOBJ  : return  H5E_CANTOPENOBJ;
+        /* case JH5E_CANTRECV  : return  H5E_CANTRECV; removed from 1.6.4*/
+        case JH5E_CANTREGISTER  : return  H5E_CANTREGISTER;
+        case JH5E_CANTRELEASE  : return  H5E_CANTRELEASE;
+        case JH5E_CANTSELECT  : return  H5E_CANTSELECT;
+        /* case JH5E_CANTSENDMDATA  : return  H5E_CANTSENDMDATA; removed from 1.6.4*/
+        case JH5E_CANTSET  : return  H5E_CANTSET;
+        case JH5E_CANTSPLIT  : return  H5E_CANTSPLIT;
+        case JH5E_CANTUNLOCK  : return  H5E_CANTUNLOCK;
+        case JH5E_CLOSEERROR  : return  H5E_CLOSEERROR;
+        case JH5E_COMPLEN  : return  H5E_COMPLEN;
+/* removed from HDF5 1.6.5
+        case JH5E_CWG  : return  H5E_CWG;
+*/
+        case JH5E_DATASET  : return  H5E_DATASET;
+        case JH5E_DATASPACE  : return  H5E_DATASPACE;
+        case JH5E_DATATYPE  : return  H5E_DATATYPE;
+        case JH5E_DUPCLASS  : return  H5E_DUPCLASS;
+        case JH5E_EFL  : return  H5E_EFL;
+        case JH5E_EXISTS  : return  H5E_EXISTS;
+        case JH5E_FCNTL  : return  H5E_FCNTL;
+        case JH5E_FILE  : return  H5E_FILE;
+        case JH5E_FILEEXISTS  : return  H5E_FILEEXISTS;
+        case JH5E_FILEOPEN  : return  H5E_FILEOPEN;
+        /* case JH5E_FPHDF5  : return  H5E_FPHDF5; removed from 1.6.4*/
+        case JH5E_FUNC  : return  H5E_FUNC;
+        case JH5E_HEAP  : return  H5E_HEAP;
+        case JH5E_INTERNAL  : return  H5E_INTERNAL;
+        case JH5E_IO  : return  H5E_IO;
+        case JH5E_LINK  : return  H5E_LINK;
+        case JH5E_LINKCOUNT  : return  H5E_LINKCOUNT;
+        case JH5E_MOUNT  : return  H5E_MOUNT;
+        case JH5E_MPI  : return  H5E_MPI;
+        case JH5E_MPIERRSTR  : return  H5E_MPIERRSTR;
+        case JH5E_NOFILTER  : return  H5E_NOFILTER;
+        case JH5E_NOIDS  : return  H5E_NOIDS;
+        case JH5E_NONE_MAJOR  : return  H5E_NONE_MAJOR;
+        case JH5E_NONE_MINOR  : return  H5E_NONE_MINOR;
+        case JH5E_NOSPACE  : return  H5E_NOSPACE;
+        case JH5E_NOTCACHED  : return  H5E_NOTCACHED;
+        case JH5E_NOTFOUND  : return  H5E_NOTFOUND;
+        case JH5E_NOTHDF5  : return  H5E_NOTHDF5;
+        case JH5E_OHDR  : return  H5E_OHDR;
+        case JH5E_OVERFLOW  : return  H5E_OVERFLOW;
+        case JH5E_PLINE  : return  H5E_PLINE;
+        case JH5E_PLIST  : return  H5E_PLIST;
+        case JH5E_PROTECT  : return  H5E_PROTECT;
+        case JH5E_READERROR  : return  H5E_READERROR;
+        case JH5E_REFERENCE  : return  H5E_REFERENCE;
+        case JH5E_RESOURCE  : return  H5E_RESOURCE;
+        case JH5E_RS  : return  H5E_RS;
+        case JH5E_SEEKERROR  : return  H5E_SEEKERROR;
+        case JH5E_SETLOCAL  : return  H5E_SETLOCAL;
+        /*case JH5E_SLINK  : return  H5E_SLINK; removed from 1.8.0*/
+        case JH5E_STORAGE  : return  H5E_STORAGE;
+        case JH5E_SYM  : return  H5E_SYM;
+        /*case JH5E_TBBT  : return  H5E_TBBT; removed from 1.8.0*/
+        case JH5E_TRUNCATED  : return  H5E_TRUNCATED;
+        case JH5E_TST  : return  H5E_TST;
+        case JH5E_UNINITIALIZED  : return  H5E_UNINITIALIZED;
+        case JH5E_UNSUPPORTED  : return  H5E_UNSUPPORTED;
+        case JH5E_VERSION  : return  H5E_VERSION;
+        case JH5E_VFL  : return  H5E_VFL;
+        case JH5E_WALK_DOWNWARD  : return  H5E_WALK_DOWNWARD;
+        case JH5E_WALK_UPWARD  : return  H5E_WALK_UPWARD;
+        case JH5E_WRITEERROR  : return  H5E_WRITEERROR;
+        case JH5F_ACC_CREAT  : return  H5F_ACC_CREAT;
+        case JH5F_ACC_DEBUG  : return  H5F_ACC_DEBUG;
+        case JH5F_ACC_EXCL  : return  H5F_ACC_EXCL;
+        case JH5F_ACC_RDONLY  : return  H5F_ACC_RDONLY;
+        case JH5F_ACC_RDWR  : return  H5F_ACC_RDWR;
+        case JH5F_ACC_TRUNC  : return  H5F_ACC_TRUNC;
+        case JH5F_CLOSE_DEFAULT  : return  H5F_CLOSE_DEFAULT;
+        case JH5F_CLOSE_SEMI  : return  H5F_CLOSE_SEMI;
+        case JH5F_CLOSE_STRONG  : return  H5F_CLOSE_STRONG;
+        case JH5F_CLOSE_WEAK  : return  H5F_CLOSE_WEAK;
+        case JH5F_OBJ_ALL  : return  H5F_OBJ_ALL;
+        case JH5F_OBJ_DATASET  : return  H5F_OBJ_DATASET;
+        case JH5F_OBJ_DATATYPE  : return  H5F_OBJ_DATATYPE;
+        case JH5F_OBJ_FILE  : return  H5F_OBJ_FILE;
+        case JH5F_OBJ_ATTR  : return  H5F_OBJ_ATTR;
+        case JH5F_OBJ_GROUP  : return  H5F_OBJ_GROUP;
+        case JH5F_SCOPE_GLOBAL  : return  H5F_SCOPE_GLOBAL;
+        case JH5F_SCOPE_LOCAL  : return  H5F_SCOPE_LOCAL;
+        case JH5F_UNLIMITED  : return  (int)H5F_UNLIMITED;
+        case JH5F_LIBVER_EARLIEST  : return  H5F_LIBVER_EARLIEST;
+        case JH5F_LIBVER_LATEST  : return  H5F_LIBVER_LATEST;
+        case JH5G_DATASET  : return  H5G_DATASET;
+        case JH5G_GROUP  : return  H5G_GROUP;
+        case JH5G_LINK  : return  H5G_LINK;
+        case JH5G_LINK_ERROR  : return  H5G_LINK_ERROR;
+        case JH5G_LINK_HARD  : return  H5G_LINK_HARD;
+        case JH5G_LINK_SOFT  : return  H5G_LINK_SOFT;
+        case JH5G_NLIBTYPES  : return  H5G_NLIBTYPES;
+        case JH5G_NTYPES  : return  H5G_NTYPES;
+        case JH5G_NUSERTYPES  : return  H5G_NUSERTYPES;
+        /*case JH5G_RESERVED_4  : return  H5G_RESERVED_4; removed from 1.8*/
+        case JH5G_RESERVED_5  : return  H5G_RESERVED_5;
+        case JH5G_RESERVED_6  : return  H5G_RESERVED_6;
+        case JH5G_RESERVED_7  : return  H5G_RESERVED_7;
+        case JH5G_SAME_LOC  : return  H5G_SAME_LOC;
+        case JH5G_TYPE  : return  H5G_TYPE;
+        case JH5G_UNKNOWN  : return  H5G_UNKNOWN;
+        /*case JH5G_USERTYPE  : return  H5G_USERTYPE;*/
+        case JH5I_ATTR  : return  H5I_ATTR;
+        case JH5I_BADID  : return  H5I_BADID;
+        case JH5I_DATASET  : return  H5I_DATASET;
+        case JH5I_DATASPACE  : return  H5I_DATASPACE;
+        case JH5I_DATATYPE  : return  H5I_DATATYPE;
+        case JH5I_FILE  : return  H5I_FILE;
+/* removed from HDF5 1.6.5
+        case JH5I_FILE_CLOSING  : return  H5I_FILE_CLOSING;
+*/
+        case JH5I_GENPROP_CLS  : return  H5I_GENPROP_CLS;
+        case JH5I_GENPROP_LST  : return  H5I_GENPROP_LST;
+        case JH5I_GROUP  : return  H5I_GROUP;
+        case JH5I_INVALID_HID  : return  H5I_INVALID_HID;
+        /*case JH5I_NGROUPS  : return  H5I_NGROUPS; removed from 1.8*/
+        case JH5I_REFERENCE  : return  H5I_REFERENCE;
+        /* case JH5I_TEMPBUF  : return  H5I_TEMPBUF; removed from 1.6.4*/
+        case JH5I_VFL  : return  H5I_VFL;
+        case JH5O_TYPE_UNKNOWN : return H5O_TYPE_UNKNOWN;
+        case JH5O_TYPE_GROUP : return H5O_TYPE_GROUP;
+        case JH5O_TYPE_DATASET : return H5O_TYPE_DATASET;
+        case JH5O_TYPE_NAMED_DATATYPE : return H5O_TYPE_NAMED_DATATYPE;
+        case JH5O_TYPE_NTYPES : return H5O_TYPE_NTYPES;
+        case JH5L_TYPE_ERROR : return H5L_TYPE_ERROR;
+        case JH5L_TYPE_HARD : return H5L_TYPE_HARD;
+        case JH5L_TYPE_SOFT : return H5L_TYPE_SOFT;
+        case JH5L_TYPE_EXTERNAL : return H5L_TYPE_EXTERNAL;
+        case JH5L_TYPE_MAX : return H5L_TYPE_MAX;
+        case JH5P_DATASET_CREATE  : return  H5P_DATASET_CREATE;
+        case JH5P_DATASET_CREATE_DEFAULT  : return  H5P_DATASET_CREATE_DEFAULT;
+        case JH5P_DATASET_XFER  : return  H5P_DATASET_XFER;
+        case JH5P_DATASET_XFER_DEFAULT  : return  H5P_DATASET_XFER_DEFAULT;
+        case JH5P_FILE_ACCESS  : return  H5P_FILE_ACCESS;
+        case JH5P_FILE_ACCESS_DEFAULT  : return  H5P_FILE_ACCESS_DEFAULT;
+        case JH5P_FILE_CREATE  : return  H5P_FILE_CREATE;
+        case JH5P_FILE_CREATE_DEFAULT  : return  H5P_FILE_CREATE_DEFAULT;
+        case JH5P_DEFAULT  : return  H5P_DEFAULT;
+        /*case JH5P_MOUNT  : return H5P_MOUNT;
+        case JH5P_MOUNT_DEFAULT  : return  H5P_MOUNT_DEFAULT; removed from 1.8*/
+        case JH5P_NO_CLASS  : return  H5P_NO_CLASS;
+        /*case JH5P_NO_CLASS_DEFAULT  : return  H5P_NO_CLASS_DEFAULT; removed from 1.8*/
+        case JH5P_ROOT : return H5P_ROOT;
+        case JH5P_OBJECT_CREATE : return H5P_OBJECT_CREATE;
+        case JH5P_DATASET_ACCESS : return H5P_DATASET_ACCESS;
+        case JH5P_DATASET_ACCESS_DEFAULT : return H5P_DATASET_ACCESS_DEFAULT;
+        case JH5P_FILE_MOUNT : return H5P_FILE_MOUNT;
+        case JH5P_FILE_MOUNT_DEFAULT : return H5P_FILE_MOUNT_DEFAULT;
+        case JH5P_GROUP_CREATE : return H5P_GROUP_CREATE;
+        case JH5P_GROUP_CREATE_DEFAULT : return H5P_GROUP_CREATE_DEFAULT;
+        case JH5P_GROUP_ACCESS : return H5P_GROUP_ACCESS;
+        case JH5P_GROUP_ACCESS_DEFAULT : return H5P_GROUP_ACCESS_DEFAULT;
+        case JH5P_DATATYPE_CREATE : return H5P_DATATYPE_CREATE;
+        case JH5P_DATATYPE_CREATE_DEFAULT : return H5P_DATATYPE_CREATE_DEFAULT;
+        case JH5P_DATATYPE_ACCESS : return H5P_DATATYPE_ACCESS;
+        case JH5P_DATATYPE_ACCESS_DEFAULT : return H5P_DATATYPE_ACCESS_DEFAULT;
+        case JH5P_STRING_CREATE : return H5P_STRING_CREATE;
+        case JH5P_ATTRIBUTE_CREATE : return H5P_ATTRIBUTE_CREATE;
+        case JH5P_ATTRIBUTE_CREATE_DEFAULT : return H5P_ATTRIBUTE_CREATE_DEFAULT;
+        case JH5P_OBJECT_COPY : return H5P_OBJECT_COPY;
+        case JH5P_OBJECT_COPY_DEFAULT : return H5P_OBJECT_COPY_DEFAULT;
+        case JH5P_LINK_CREATE : return H5P_LINK_CREATE;
+        case JH5P_LINK_CREATE_DEFAULT : return H5P_LINK_CREATE_DEFAULT;
+        case JH5P_LINK_ACCESS : return H5P_LINK_ACCESS;
+        case JH5P_LINK_ACCESS_DEFAULT : return H5P_LINK_ACCESS_DEFAULT;
+        case JH5R_BADTYPE  : return  H5R_BADTYPE;
+        case JH5R_DATASET_REGION  : return  H5R_DATASET_REGION;
+        /*case JH5R_INTERNAL  : return  H5R_INTERNAL; removed from 1.8*/
+        case JH5R_MAXTYPE  : return  H5R_MAXTYPE;
+        case JH5R_OBJ_REF_BUF_SIZE  : return  H5R_OBJ_REF_BUF_SIZE;
+        case JH5R_OBJECT  : return  H5R_OBJECT;
+        case JH5S_ALL  : return  H5S_ALL;
+        /*case JH5S_COMPLEX  : return  H5S_COMPLEX; removed from 1.8*/
+        case JH5S_MAX_RANK  : return  H5S_MAX_RANK;
+        case JH5S_NO_CLASS  : return  H5S_NO_CLASS;
+        case JH5S_NULL : return H5S_NULL;
+        case JH5S_SCALAR  : return  H5S_SCALAR;
+        case JH5S_SEL_ALL  : return  H5S_SEL_ALL;
+        case JH5S_SEL_ERROR  : return  H5S_SEL_ERROR;
+        case JH5S_SEL_HYPERSLABS  : return  H5S_SEL_HYPERSLABS;
+        case JH5S_SEL_N  : return  H5S_SEL_N;
+        case JH5S_SEL_NONE  : return  H5S_SEL_NONE;
+        case JH5S_SEL_POINTS  : return  H5S_SEL_POINTS;
+        case JH5S_SELECT_AND  : return  H5S_SELECT_AND;
+        case JH5S_SELECT_APPEND  : return  H5S_SELECT_APPEND;
+        case JH5S_SELECT_INVALID  : return  H5S_SELECT_INVALID;
+        case JH5S_SELECT_NOOP  : return  H5S_SELECT_NOOP;
+        case JH5S_SELECT_NOTA  : return  H5S_SELECT_NOTA;
+        case JH5S_SELECT_NOTB  : return  H5S_SELECT_NOTB;
+        case JH5S_SELECT_OR  : return  H5S_SELECT_OR;
+        case JH5S_SELECT_PREPEND  : return  H5S_SELECT_PREPEND;
+        case JH5S_SELECT_SET  : return  H5S_SELECT_SET;
+        case JH5S_SELECT_XOR  : return  H5S_SELECT_XOR;
+        case JH5S_SIMPLE  : return  H5S_SIMPLE;
+        case JH5S_UNLIMITED  : return  (int)H5S_UNLIMITED;
+        case JH5T_ALPHA_B16  : return  H5T_ALPHA_B16;
+        case JH5T_ALPHA_B32  : return  H5T_ALPHA_B32;
+        case JH5T_ALPHA_B64  : return  H5T_ALPHA_B64;
+        case JH5T_ALPHA_B8  : return  H5T_ALPHA_B8;
+        case JH5T_ALPHA_F32  : return  H5T_ALPHA_F32;
+        case JH5T_ALPHA_F64  : return  H5T_ALPHA_F64;
+        case JH5T_ALPHA_I16  : return  H5T_ALPHA_I16;
+        case JH5T_ALPHA_I32  : return  H5T_ALPHA_I32;
+        case JH5T_ALPHA_I64  : return  H5T_ALPHA_I64;
+        case JH5T_ALPHA_I8  : return  H5T_ALPHA_I8;
+        case JH5T_ALPHA_U16  : return  H5T_ALPHA_U16;
+        case JH5T_ALPHA_U32  : return  H5T_ALPHA_U32;
+        case JH5T_ALPHA_U64  : return  H5T_ALPHA_U64;
+        case JH5T_ALPHA_U8  : return  H5T_ALPHA_U8;
+        case JH5T_ARRAY  : return  H5T_ARRAY;
+        case JH5T_BITFIELD  : return  H5T_BITFIELD;
+        case JH5T_BKG_NO  : return  H5T_BKG_NO;
+        case JH5T_BKG_YES  : return  H5T_BKG_YES;
+        case JH5T_C_S1  : return  H5T_C_S1;
+        case JH5T_COMPOUND  : return  H5T_COMPOUND;
+        case JH5T_CONV_CONV  : return  H5T_CONV_CONV;
+        case JH5T_CONV_FREE  : return  H5T_CONV_FREE;
+        case JH5T_CONV_INIT  : return  H5T_CONV_INIT;
+        case JH5T_CSET_ASCII  : return  H5T_CSET_ASCII;
+        case JH5T_CSET_ERROR  : return  H5T_CSET_ERROR;
+        case JH5T_CSET_UTF8   : return  H5T_CSET_UTF8;
+        case JH5T_CSET_RESERVED_10  : return  H5T_CSET_RESERVED_10;
+        case JH5T_CSET_RESERVED_11  : return  H5T_CSET_RESERVED_11;
+        case JH5T_CSET_RESERVED_12  : return  H5T_CSET_RESERVED_12;
+        case JH5T_CSET_RESERVED_13  : return  H5T_CSET_RESERVED_13;
+        case JH5T_CSET_RESERVED_14  : return  H5T_CSET_RESERVED_14;
+        case JH5T_CSET_RESERVED_15  : return  H5T_CSET_RESERVED_15;
+        case JH5T_CSET_RESERVED_2  : return  H5T_CSET_RESERVED_2;
+        case JH5T_CSET_RESERVED_3  : return  H5T_CSET_RESERVED_3;
+        case JH5T_CSET_RESERVED_4  : return  H5T_CSET_RESERVED_4;
+        case JH5T_CSET_RESERVED_5  : return  H5T_CSET_RESERVED_5;
+        case JH5T_CSET_RESERVED_6  : return  H5T_CSET_RESERVED_6;
+        case JH5T_CSET_RESERVED_7  : return  H5T_CSET_RESERVED_7;
+        case JH5T_CSET_RESERVED_8  : return  H5T_CSET_RESERVED_8;
+        case JH5T_CSET_RESERVED_9  : return  H5T_CSET_RESERVED_9;
+        case JH5T_DIR_ASCEND  : return  H5T_DIR_ASCEND;
+        case JH5T_DIR_DEFAULT  : return  H5T_DIR_DEFAULT;
+        case JH5T_DIR_DESCEND  : return  H5T_DIR_DESCEND;
+        case JH5T_ENUM  : return  H5T_ENUM;
+        case JH5T_FLOAT  : return  H5T_FLOAT;
+        case JH5T_FORTRAN_S1  : return  H5T_FORTRAN_S1;
+        case JH5T_IEEE_F32BE  : return  H5T_IEEE_F32BE;
+        case JH5T_IEEE_F32LE  : return  H5T_IEEE_F32LE;
+        case JH5T_IEEE_F64BE  : return  H5T_IEEE_F64BE;
+        case JH5T_IEEE_F64LE  : return  H5T_IEEE_F64LE;
+        case JH5T_INTEGER  : return  H5T_INTEGER;
+        case JH5T_INTEL_B16  : return  H5T_INTEL_B16;
+        case JH5T_INTEL_B32  : return  H5T_INTEL_B32;
+        case JH5T_INTEL_B64  : return  H5T_INTEL_B64;
+        case JH5T_INTEL_B8  : return  H5T_INTEL_B8;
+        case JH5T_INTEL_F32  : return  H5T_INTEL_F32;
+        case JH5T_INTEL_F64  : return  H5T_INTEL_F64;
+        case JH5T_INTEL_I16  : return  H5T_INTEL_I16;
+        case JH5T_INTEL_I32  : return  H5T_INTEL_I32;
+        case JH5T_INTEL_I64  : return  H5T_INTEL_I64;
+        case JH5T_INTEL_I8  : return  H5T_INTEL_I8;
+        case JH5T_INTEL_U16  : return  H5T_INTEL_U16;
+        case JH5T_INTEL_U32  : return  H5T_INTEL_U32;
+        case JH5T_INTEL_U64  : return  H5T_INTEL_U64;
+        case JH5T_INTEL_U8  : return  H5T_INTEL_U8;
+        case JH5T_MIPS_B16  : return  H5T_MIPS_B16;
+        case JH5T_MIPS_B32  : return  H5T_MIPS_B32;
+        case JH5T_MIPS_B64  : return  H5T_MIPS_B64;
+        case JH5T_MIPS_B8  : return  H5T_MIPS_B8;
+        case JH5T_MIPS_F32  : return  H5T_MIPS_F32;
+        case JH5T_MIPS_F64  : return  H5T_MIPS_F64;
+        case JH5T_MIPS_I16  : return  H5T_MIPS_I16;
+        case JH5T_MIPS_I32  : return  H5T_MIPS_I32;
+        case JH5T_MIPS_I64  : return  H5T_MIPS_I64;
+        case JH5T_MIPS_I8  : return  H5T_MIPS_I8;
+        case JH5T_MIPS_U16  : return  H5T_MIPS_U16;
+        case JH5T_MIPS_U32  : return  H5T_MIPS_U32;
+        case JH5T_MIPS_U64  : return  H5T_MIPS_U64;
+        case JH5T_MIPS_U8  : return  H5T_MIPS_U8;
+        case JH5T_NATIVE_B16  : return  H5T_NATIVE_B16;
+        case JH5T_NATIVE_B32  : return  H5T_NATIVE_B32;
+        case JH5T_NATIVE_B64  : return  H5T_NATIVE_B64;
+        case JH5T_NATIVE_B8  : return  H5T_NATIVE_B8;
+        case JH5T_NATIVE_CHAR  : return  H5T_NATIVE_CHAR;
+        case JH5T_NATIVE_DOUBLE  : return  H5T_NATIVE_DOUBLE;
+        case JH5T_NATIVE_FLOAT  : return  H5T_NATIVE_FLOAT;
+        case JH5T_NATIVE_HADDR  : return  H5T_NATIVE_HADDR;
+        case JH5T_NATIVE_HBOOL  : return  H5T_NATIVE_HBOOL;
+        case JH5T_NATIVE_HERR  : return  H5T_NATIVE_HERR;
+        case JH5T_NATIVE_HSIZE  : return  H5T_NATIVE_HSIZE;
+        case JH5T_NATIVE_HSSIZE  : return  H5T_NATIVE_HSSIZE;
+        case JH5T_NATIVE_INT  : return  H5T_NATIVE_INT;
+        case JH5T_NATIVE_INT_FAST16  : return  H5T_NATIVE_INT_FAST16;
+        case JH5T_NATIVE_INT_FAST32  : return  H5T_NATIVE_INT_FAST32;
+        case JH5T_NATIVE_INT_FAST64  : return  H5T_NATIVE_INT_FAST64;
+        case JH5T_NATIVE_INT_FAST8  : return  H5T_NATIVE_INT_FAST8;
+        case JH5T_NATIVE_INT_LEAST16  : return  H5T_NATIVE_INT_LEAST16;
+        case JH5T_NATIVE_INT_LEAST32  : return  H5T_NATIVE_INT_LEAST32;
+        case JH5T_NATIVE_INT_LEAST64  : return  H5T_NATIVE_INT_LEAST64;
+        case JH5T_NATIVE_INT_LEAST8  : return  H5T_NATIVE_INT_LEAST8;
+        case JH5T_NATIVE_INT16  : return  H5T_NATIVE_INT16;
+        case JH5T_NATIVE_INT32  : return  H5T_NATIVE_INT32;
+        case JH5T_NATIVE_INT64  : return  H5T_NATIVE_INT64;
+        case JH5T_NATIVE_INT8  : return  H5T_NATIVE_INT8;
+        case JH5T_NATIVE_LDOUBLE  : return  H5T_NATIVE_LDOUBLE;
+        case JH5T_NATIVE_LLONG  : return  H5T_NATIVE_LLONG;
+        case JH5T_NATIVE_LONG  : return  H5T_NATIVE_LONG;
+        case JH5T_NATIVE_OPAQUE  : return  H5T_NATIVE_OPAQUE;
+        case JH5T_NATIVE_SCHAR  : return  H5T_NATIVE_SCHAR;
+        case JH5T_NATIVE_SHORT  : return  H5T_NATIVE_SHORT;
+        case JH5T_NATIVE_UCHAR  : return  H5T_NATIVE_UCHAR;
+        case JH5T_NATIVE_UINT  : return  H5T_NATIVE_UINT;
+        case JH5T_NATIVE_UINT_FAST16  : return  H5T_NATIVE_UINT_FAST16;
+        case JH5T_NATIVE_UINT_FAST32  : return  H5T_NATIVE_UINT_FAST32;
+        case JH5T_NATIVE_UINT_FAST64  : return  H5T_NATIVE_UINT_FAST64;
+        case JH5T_NATIVE_UINT_FAST8  : return  H5T_NATIVE_UINT_FAST8;
+        case JH5T_NATIVE_UINT_LEAST16  : return  H5T_NATIVE_UINT_LEAST16;
+        case JH5T_NATIVE_UINT_LEAST32  : return  H5T_NATIVE_UINT_LEAST32;
+        case JH5T_NATIVE_UINT_LEAST64  : return  H5T_NATIVE_UINT_LEAST64;
+        case JH5T_NATIVE_UINT_LEAST8  : return  H5T_NATIVE_UINT_LEAST8;
+        case JH5T_NATIVE_UINT16  : return  H5T_NATIVE_UINT16;
+        case JH5T_NATIVE_UINT32  : return  H5T_NATIVE_UINT32;
+        case JH5T_NATIVE_UINT64  : return  H5T_NATIVE_UINT64;
+        case JH5T_NATIVE_UINT8  : return  H5T_NATIVE_UINT8;
+        case JH5T_NATIVE_ULLONG  : return  H5T_NATIVE_ULLONG;
+        case JH5T_NATIVE_ULONG  : return  H5T_NATIVE_ULONG;
+        case JH5T_NATIVE_USHORT  : return  H5T_NATIVE_USHORT;
+        case JH5T_NCLASSES  : return  H5T_NCLASSES;
+        case JH5T_NO_CLASS  : return  H5T_NO_CLASS;
+        case JH5T_NORM_ERROR  : return  H5T_NORM_ERROR;
+        case JH5T_NORM_IMPLIED  : return  H5T_NORM_IMPLIED;
+        case JH5T_NORM_MSBSET  : return  H5T_NORM_MSBSET;
+        case JH5T_NORM_NONE  : return  H5T_NORM_NONE;
+        case JH5T_NPAD  : return  H5T_NPAD;
+        case JH5T_NSGN  : return  H5T_NSGN;
+        case JH5T_OPAQUE  : return  H5T_OPAQUE;
+        case JH5T_OPAQUE_TAG_MAX  : return  H5T_OPAQUE_TAG_MAX;
+        case JH5T_ORDER_BE  : return  H5T_ORDER_BE;
+        case JH5T_ORDER_ERROR  : return  H5T_ORDER_ERROR;
+        case JH5T_ORDER_LE  : return  H5T_ORDER_LE;
+        case JH5T_ORDER_NONE  : return  H5T_ORDER_NONE;
+        case JH5T_ORDER_VAX  : return  H5T_ORDER_VAX;
+        case JH5T_PAD_BACKGROUND  : return  H5T_PAD_BACKGROUND;
+        case JH5T_PAD_ERROR  : return  H5T_PAD_ERROR;
+        case JH5T_PAD_ONE  : return  H5T_PAD_ONE;
+        case JH5T_PAD_ZERO  : return  H5T_PAD_ZERO;
+        case JH5T_PERS_DONTCARE  : return  H5T_PERS_DONTCARE;
+        case JH5T_PERS_HARD  : return  H5T_PERS_HARD;
+        case JH5T_PERS_SOFT  : return  H5T_PERS_SOFT;
+        case JH5T_REFERENCE  : return  H5T_REFERENCE;
+        case JH5T_SGN_2  : return  H5T_SGN_2;
+        case JH5T_SGN_ERROR  : return  H5T_SGN_ERROR;
+        case JH5T_SGN_NONE  : return  H5T_SGN_NONE;
+        case JH5T_STD_B16BE  : return  H5T_STD_B16BE;
+        case JH5T_STD_B16LE  : return  H5T_STD_B16LE;
+        case JH5T_STD_B32BE  : return  H5T_STD_B32BE;
+        case JH5T_STD_B32LE  : return  H5T_STD_B32LE;
+        case JH5T_STD_B64BE  : return  H5T_STD_B64BE;
+        case JH5T_STD_B64LE  : return  H5T_STD_B64LE;
+        case JH5T_STD_B8BE  : return  H5T_STD_B8BE;
+        case JH5T_STD_B8LE  : return  H5T_STD_B8LE;
+        case JH5T_STD_I16BE  : return  H5T_STD_I16BE;
+        case JH5T_STD_I16LE  : return  H5T_STD_I16LE;
+        case JH5T_STD_I32BE  : return  H5T_STD_I32BE;
+        case JH5T_STD_I32LE  : return  H5T_STD_I32LE;
+        case JH5T_STD_I64BE  : return  H5T_STD_I64BE;
+        case JH5T_STD_I64LE  : return  H5T_STD_I64LE;
+        case JH5T_STD_I8BE  : return  H5T_STD_I8BE;
+        case JH5T_STD_I8LE  : return  H5T_STD_I8LE;
+        case JH5T_STD_REF_DSETREG  : return  H5T_STD_REF_DSETREG;
+        case JH5T_STD_REF_OBJ  : return  H5T_STD_REF_OBJ;
+        case JH5T_STD_U16BE  : return  H5T_STD_U16BE;
+        case JH5T_STD_U16LE  : return  H5T_STD_U16LE;
+        case JH5T_STD_U32BE  : return  H5T_STD_U32BE;
+        case JH5T_STD_U32LE  : return  H5T_STD_U32LE;
+        case JH5T_STD_U64BE  : return  H5T_STD_U64BE;
+        case JH5T_STD_U64LE  : return  H5T_STD_U64LE;
+        case JH5T_STD_U8BE  : return  H5T_STD_U8BE;
+        case JH5T_STD_U8LE  : return  H5T_STD_U8LE;
+        case JH5T_STR_ERROR  : return  H5T_STR_ERROR;
+        case JH5T_STR_NULLPAD  : return  H5T_STR_NULLPAD;
+        case JH5T_STR_NULLTERM  : return  H5T_STR_NULLTERM;
+        case JH5T_STR_RESERVED_10  : return  H5T_STR_RESERVED_10;
+        case JH5T_STR_RESERVED_11  : return  H5T_STR_RESERVED_11;
+        case JH5T_STR_RESERVED_12  : return  H5T_STR_RESERVED_12;
+        case JH5T_STR_RESERVED_13  : return  H5T_STR_RESERVED_13;
+        case JH5T_STR_RESERVED_14  : return  H5T_STR_RESERVED_14;
+        case JH5T_STR_RESERVED_15  : return  H5T_STR_RESERVED_15;
+        case JH5T_STR_RESERVED_3  : return  H5T_STR_RESERVED_3;
+        case JH5T_STR_RESERVED_4  : return  H5T_STR_RESERVED_4;
+        case JH5T_STR_RESERVED_5  : return  H5T_STR_RESERVED_5;
+        case JH5T_STR_RESERVED_6  : return  H5T_STR_RESERVED_6;
+        case JH5T_STR_RESERVED_7  : return  H5T_STR_RESERVED_7;
+        case JH5T_STR_RESERVED_8  : return  H5T_STR_RESERVED_8;
+        case JH5T_STR_RESERVED_9  : return  H5T_STR_RESERVED_9;
+        case JH5T_STR_SPACEPAD  : return  H5T_STR_SPACEPAD;
+        case JH5T_STRING  : return  H5T_STRING;
+        case JH5T_TIME  : return  H5T_TIME;
+        case JH5T_UNIX_D32BE  : return  H5T_UNIX_D32BE;
+        case JH5T_UNIX_D32LE  : return  H5T_UNIX_D32LE;
+        case JH5T_UNIX_D64BE  : return  H5T_UNIX_D64BE;
+        case JH5T_UNIX_D64LE  : return  H5T_UNIX_D64LE;
+        case JH5T_VARIABLE  : return (jint) H5T_VARIABLE;
+        case JH5T_VLEN  : return  H5T_VLEN;
+        case JH5Z_CB_CONT  : return  H5Z_CB_CONT;
+        case JH5Z_CB_ERROR  : return  H5Z_CB_ERROR;
+        case JH5Z_CB_FAIL  : return  H5Z_CB_FAIL;
+        case JH5Z_CB_NO  : return  H5Z_CB_NO;
+        case JH5Z_DISABLE_EDC  : return  H5Z_DISABLE_EDC;
+        case JH5Z_ENABLE_EDC  : return  H5Z_ENABLE_EDC;
+        case JH5Z_ERROR_EDC  : return  H5Z_ERROR_EDC;
+        case JH5Z_FILTER_DEFLATE  : return  H5Z_FILTER_DEFLATE;
+        case JH5Z_FILTER_ERROR  : return  H5Z_FILTER_ERROR;
+        case JH5Z_FILTER_FLETCHER32  : return  H5Z_FILTER_FLETCHER32;
+        case JH5Z_FILTER_MAX  : return  H5Z_FILTER_MAX;
+        case JH5Z_FILTER_NONE  : return  H5Z_FILTER_NONE;
+        case JH5Z_FILTER_RESERVED  : return  H5Z_FILTER_RESERVED;
+        case JH5Z_FILTER_SHUFFLE  : return  H5Z_FILTER_SHUFFLE;
+        case JH5Z_FILTER_SZIP  : return  H5Z_FILTER_SZIP;
+        case JH5Z_FLAG_DEFMASK  : return  H5Z_FLAG_DEFMASK;
+        case JH5Z_FLAG_INVMASK  : return  H5Z_FLAG_INVMASK;
+        case JH5Z_FLAG_MANDATORY  : return  H5Z_FLAG_MANDATORY;
+        case JH5Z_FLAG_OPTIONAL  : return  H5Z_FLAG_OPTIONAL;
+        case JH5Z_FLAG_REVERSE  : return  H5Z_FLAG_REVERSE;
+        case JH5Z_FLAG_SKIP_EDC  : return  H5Z_FLAG_SKIP_EDC;
+        case JH5Z_MAX_NFILTERS  : return  H5Z_MAX_NFILTERS;
+        case JH5Z_NO_EDC  : return  H5Z_NO_EDC;
+        case JH5Z_FILTER_CONFIG_ENCODE_ENABLED  : return  H5Z_FILTER_CONFIG_ENCODE_ENABLED;
+        case JH5Z_FILTER_CONFIG_DECODE_ENABLED  : return  H5Z_FILTER_CONFIG_DECODE_ENABLED;
+        case JH5Z_SO_INT : return H5Z_SO_INT;
+        case JH5Z_SO_FLOAT_DSCALE : return H5Z_SO_FLOAT_DSCALE;
+        case JH5Z_SO_FLOAT_ESCALE : return H5Z_SO_FLOAT_ESCALE;
+
+        default:
+            h5illegalConstantError(env);
+            return -1;
+    }
+}
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5ConstantsJHDF5.h b/source/c/jhdf5/h5ConstantsJHDF5.h
new file mode 100755
index 0000000..16c9ed3
--- /dev/null
+++ b/source/c/jhdf5/h5ConstantsJHDF5.h
@@ -0,0 +1,505 @@
+/*
+ *  The Java constants defined at ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.java
+ *
+ *  The values are arbitrary, but it is very important that this
+ *  file has the same values as HDF5Constants.java.
+ */
+
+#include "hdf5.h"
+
+#define JH5_SZIP_MAX_PIXELS_PER_BLOCK 1000
+#define JH5_SZIP_NN_OPTION_MASK 1010
+#define JH5_SZIP_EC_OPTION_MASK 1020
+#define JH5_SZIP_ALLOW_K13_OPTION_MASK 1021
+#define JH5_SZIP_CHIP_OPTION_MASK 1022
+#define JH5D_ALLOC_TIME_DEFAULT 1030
+#define JH5D_ALLOC_TIME_EARLY 1040
+#define JH5D_ALLOC_TIME_ERROR 1050
+#define JH5D_ALLOC_TIME_INCR 1060
+#define JH5D_ALLOC_TIME_LATE 1070
+#define JH5D_CHUNKED 1080
+#define JH5D_COMPACT 1090
+#define JH5D_CONTIGUOUS 1100
+#define JH5D_FILL_TIME_ALLOC 1110
+#define JH5D_FILL_TIME_ERROR 1120
+#define JH5D_FILL_TIME_NEVER 1130
+#define JH5D_FILL_VALUE_DEFAULT 1140
+#define JH5D_FILL_VALUE_ERROR 1150
+#define JH5D_FILL_VALUE_UNDEFINED 1160
+#define JH5D_FILL_VALUE_USER_DEFINED 1170
+#define JH5D_LAYOUT_ERROR 1180
+#define JH5D_NLAYOUTS 1190
+#define JH5D_SPACE_STATUS_ALLOCATED 1200
+#define JH5D_SPACE_STATUS_ERROR 1210
+#define JH5D_SPACE_STATUS_NOT_ALLOCATED 1220
+#define JH5D_SPACE_STATUS_PART_ALLOCATED 1230
+#define JH5E_ALIGNMENT 1240
+#define JH5E_ALREADYEXISTS 1250
+#define JH5E_ALREADYINIT 1260
+#define JH5E_ARGS 1270
+#define JH5E_ATOM 1280
+#define JH5E_ATTR 1290
+#define JH5E_BADATOM 1300
+#define JH5E_BADFILE 1310
+#define JH5E_BADGROUP 1320
+#define JH5E_BADMESG 1330
+#define JH5E_BADRANGE 1340
+#define JH5E_BADSELECT 1350
+#define JH5E_BADSIZE 1360
+#define JH5E_BADTYPE 1370
+#define JH5E_BADVALUE 1380
+#define JH5E_BTREE 1390
+#define JH5E_CACHE 1400
+#define JH5E_CALLBACK 1410
+#define JH5E_CANAPPLY 1420
+/*#define JH5E_CANTALLOC 1430 removed from 1.6.4 */
+/*#define JH5E_CANTCHANGE 1440 removed from 1.6.4 */
+#define JH5E_CANTCLIP 1450
+#define JH5E_CANTCLOSEFILE 1460
+#define JH5E_CANTCONVERT 1470
+#define JH5E_CANTCOPY 1480
+#define JH5E_CANTCOUNT 1490
+#define JH5E_CANTCREATE 1500
+#define JH5E_CANTDEC 1510
+#define JH5E_CANTDECODE 1520
+#define JH5E_CANTDELETE 1530
+#define JH5E_CANTENCODE 1540
+#define JH5E_CANTFLUSH 1550
+#define JH5E_CANTFREE 1560
+#define JH5E_CANTGET 1570
+#define JH5E_CANTINC 1580
+#define JH5E_CANTINIT 1590
+#define JH5E_CANTINSERT 1600
+#define JH5E_CANTLIST 1610
+#define JH5E_CANTLOAD 1620
+#define JH5E_CANTLOCK 1630
+#define JH5E_CANTMAKETREE 1640
+#define JH5E_CANTNEXT 1650
+#define JH5E_CANTOPENFILE 1660
+#define JH5E_CANTOPENOBJ 1670
+/*#define JH5E_CANTRECV 1680 removed from 1.6.4 */
+#define JH5E_CANTREGISTER 1690
+#define JH5E_CANTRELEASE 1700
+#define JH5E_CANTSELECT 1710
+/*#define JH5E_CANTSENDMDATA 1720 removed from 1.6.4 */
+#define JH5E_CANTSET 1730
+#define JH5E_CANTSPLIT 1740
+#define JH5E_CANTUNLOCK 1750
+#define JH5E_CLOSEERROR 1760
+#define JH5E_COMPLEN 1770
+/*#define JH5E_CWG 1780 removed from 1.6.5 */
+#define JH5E_DATASET 1790
+#define JH5E_DATASPACE 1800
+#define JH5E_DATATYPE 1810
+#define JH5E_DUPCLASS 1820
+#define JH5E_EFL 1830
+#define JH5E_EXISTS 1840
+#define JH5E_FCNTL 1850
+#define JH5E_FILE 1860
+#define JH5E_FILEEXISTS 1870
+#define JH5E_FILEOPEN 1880
+/*#define JH5E_FPHDF5 1890 removed from 1.6.4 */
+#define JH5E_FUNC 1900
+#define JH5E_HEAP 1910
+#define JH5E_INTERNAL 1920
+#define JH5E_IO 1930
+#define JH5E_LINK 1940
+#define JH5E_LINKCOUNT 1950
+#define JH5E_MOUNT 1960
+#define JH5E_MPI 1970
+#define JH5E_MPIERRSTR 1980
+#define JH5E_NOFILTER 1990
+#define JH5E_NOIDS 2000
+#define JH5E_NONE_MAJOR 2010
+#define JH5E_NONE_MINOR 2020
+#define JH5E_NOSPACE 2030
+#define JH5E_NOTCACHED 2040
+#define JH5E_NOTFOUND 2050
+#define JH5E_NOTHDF5 2060
+#define JH5E_OHDR 2070
+#define JH5E_OVERFLOW 2080
+#define JH5E_PLINE 2090
+#define JH5E_PLIST 2100
+#define JH5E_PROTECT 2110
+#define JH5E_READERROR 2120
+#define JH5E_REFERENCE 2130
+#define JH5E_RESOURCE 2140
+#define JH5E_RS 2150
+#define JH5E_SEEKERROR 2160
+#define JH5E_SETLOCAL 2170
+/*#define JH5E_SLINK 2180 removed from 1.8.0 */
+#define JH5E_STORAGE 2190
+#define JH5E_SYM 2200
+/*#define JH5E_TBBT 2210 removed from 1.8.0 */
+#define JH5E_TRUNCATED 2220
+#define JH5E_TST 2230
+#define JH5E_UNINITIALIZED 2240
+#define JH5E_UNSUPPORTED 2250
+#define JH5E_VERSION 2260
+#define JH5E_VFL 2270
+#define JH5E_WALK_DOWNWARD 2280
+#define JH5E_WALK_UPWARD 2290
+#define JH5E_WRITEERROR 2300
+#define JH5F_ACC_CREAT 2310
+#define JH5F_ACC_DEBUG 2320
+#define JH5F_ACC_EXCL 2330
+#define JH5F_ACC_RDONLY 2340
+#define JH5F_ACC_RDWR 2350
+#define JH5F_ACC_TRUNC 2360
+#define JH5F_CLOSE_DEFAULT 2370
+#define JH5F_CLOSE_SEMI 2380
+#define JH5F_CLOSE_STRONG 2390
+#define JH5F_CLOSE_WEAK 2400
+#define JH5F_OBJ_ALL 2410
+#define JH5F_OBJ_ATTR 2415
+#define JH5F_OBJ_DATASET 2420
+#define JH5F_OBJ_DATATYPE 2430
+#define JH5F_OBJ_FILE 2440
+#define JH5F_OBJ_GROUP 2450
+#define JH5F_OBJ_LOCAL 2455
+#define JH5F_SCOPE_DOWN 2460
+#define JH5F_SCOPE_GLOBAL 2470
+#define JH5F_SCOPE_LOCAL 2480
+#define JH5F_UNLIMITED 2490
+#define JH5F_LIBVER_EARLIEST 2494
+#define JH5F_LIBVER_LATEST 2495
+#define JH5G_DATASET 2500
+#define JH5G_GROUP 2510
+#define JH5G_LINK 2520
+#define JH5G_LINK_ERROR 2530
+#define JH5G_LINK_HARD 2540
+#define JH5G_LINK_SOFT 2550
+#define JH5G_NLIBTYPES 2560
+#define JH5G_NTYPES 2570
+#define JH5G_NUSERTYPES 2580
+/*#define JH5G_RESERVED_4 2590 removed from 1.8.0 */
+#define JH5G_RESERVED_5 2600
+#define JH5G_RESERVED_6 2610
+#define JH5G_RESERVED_7 2620
+#define JH5G_SAME_LOC 2630
+#define JH5G_TYPE 2640
+#define JH5G_UNKNOWN 2650
+#define JH5G_USERTYPE 2660
+#define JH5I_ATTR 2670
+#define JH5I_BADID 2680
+#define JH5I_DATASET 2690
+#define JH5I_DATASPACE 2700
+#define JH5I_DATATYPE 2710
+#define JH5I_FILE 2720
+#define JH5I_FILE_CLOSING 2730
+#define JH5I_GENPROP_CLS 2740
+#define JH5I_GENPROP_LST 2750
+#define JH5I_GROUP 2760
+#define JH5I_INVALID_HID 2770
+/*#define JH5I_NGROUPS 2780 removed from 1.8.0 */
+#define JH5I_REFERENCE 2790
+/*#define JH5I_TEMPBUF 2800 removed from 1.8.0 */
+#define JH5I_VFL 2810
+#define JH5O_TYPE_UNKNOWN 5510
+#define JH5O_TYPE_GROUP 5520
+#define JH5O_TYPE_DATASET 5530
+#define JH5O_TYPE_NAMED_DATATYPE 5540
+#define JH5O_TYPE_NTYPES 5550
+#define JH5L_TYPE_ERROR 5560
+#define JH5L_TYPE_HARD 5570
+#define JH5L_TYPE_SOFT 5580
+#define JH5L_TYPE_EXTERNAL 5590
+#define JH5L_TYPE_MAX 5600
+#define JH5P_DATASET_CREATE 2820
+#define JH5P_DATASET_CREATE_DEFAULT 2830
+#define JH5P_DATASET_XFER 2840
+#define JH5P_DATASET_XFER_DEFAULT 2850
+#define JH5P_DEFAULT 2860
+#define JH5P_FILE_ACCESS 2870
+#define JH5P_FILE_ACCESS_DEFAULT 2880
+#define JH5P_FILE_CREATE 2890
+#define JH5P_FILE_CREATE_DEFAULT 2900
+/*#define JH5P_MOUNT 2910 removed from 1.8.0 */
+/*#define JH5P_MOUNT_DEFAULT 2920 removed from 1.8.0 */
+#define JH5P_NO_CLASS 2930
+/*#define JH5P_NO_CLASS_DEFAULT 2940 removed from 1.8.0 */
+#define JH5P_ROOT 6000
+#define JH5P_OBJECT_CREATE 6010
+#define JH5P_DATASET_ACCESS 6020
+#define JH5P_DATASET_ACCESS_DEFAULT 6030
+#define JH5P_FILE_MOUNT 6040
+#define JH5P_FILE_MOUNT_DEFAULT 6050
+#define JH5P_GROUP_CREATE 6060
+#define JH5P_GROUP_CREATE_DEFAULT 6070
+#define JH5P_GROUP_ACCESS 6080
+#define JH5P_GROUP_ACCESS_DEFAULT 6090
+#define JH5P_DATATYPE_CREATE 6100
+#define JH5P_DATATYPE_CREATE_DEFAULT 6110
+#define JH5P_DATATYPE_ACCESS 6120
+#define JH5P_DATATYPE_ACCESS_DEFAULT 6130
+#define JH5P_STRING_CREATE 6140
+#define JH5P_ATTRIBUTE_CREATE 6150
+#define JH5P_ATTRIBUTE_CREATE_DEFAULT 6160
+#define JH5P_OBJECT_COPY 6170
+#define JH5P_OBJECT_COPY_DEFAULT 6180
+#define JH5P_LINK_CREATE 6190
+#define JH5P_LINK_CREATE_DEFAULT 6200
+#define JH5P_LINK_ACCESS 6210
+#define JH5P_LINK_ACCESS_DEFAULT 6220
+#define JH5R_BADTYPE 2950
+#define JH5R_DATASET_REGION 2960
+/*#define JH5R_INTERNAL 2970 removed from 1.8.0 */
+#define JH5R_MAXTYPE 2980
+#define JH5R_OBJ_REF_BUF_SIZE 2990
+#define JH5R_OBJECT 3000
+#define JH5S_ALL 3010
+/*#define JH5S_COMPLEX 3020 removed from 1.8.0 */
+#define JH5S_MAX_RANK 3030
+#define JH5S_NO_CLASS 3040
+#define JH5S_NULL 3045
+#define JH5S_SCALAR 3050
+#define JH5S_SEL_ALL 3060
+#define JH5S_SEL_ERROR 3070
+#define JH5S_SEL_HYPERSLABS 3080
+#define JH5S_SEL_N 3090
+#define JH5S_SEL_NONE 3100
+#define JH5S_SEL_POINTS 3110
+#define JH5S_SELECT_AND 3120
+#define JH5S_SELECT_APPEND 3130
+#define JH5S_SELECT_INVALID 3140
+#define JH5S_SELECT_NOOP 3150
+#define JH5S_SELECT_NOTA 3160
+#define JH5S_SELECT_NOTB 3170
+#define JH5S_SELECT_OR 3180
+#define JH5S_SELECT_PREPEND 3190
+#define JH5S_SELECT_SET 3200
+#define JH5S_SELECT_XOR 3210
+#define JH5S_SIMPLE 3220
+#define JH5S_UNLIMITED 3230
+#define JH5T_ALPHA_B16 3240
+#define JH5T_ALPHA_B32 3250
+#define JH5T_ALPHA_B64 3260
+#define JH5T_ALPHA_B8 3270
+#define JH5T_ALPHA_F32 3280
+#define JH5T_ALPHA_F64 3290
+#define JH5T_ALPHA_I16 3300
+#define JH5T_ALPHA_I32 3310
+#define JH5T_ALPHA_I64 3320
+#define JH5T_ALPHA_I8 3330
+#define JH5T_ALPHA_U16 3340
+#define JH5T_ALPHA_U32 3350
+#define JH5T_ALPHA_U64 3360
+#define JH5T_ALPHA_U8 3370
+#define JH5T_ARRAY 3380
+#define JH5T_BITFIELD 3390
+#define JH5T_BKG_NO 3400
+#define JH5T_BKG_YES 3410
+#define JH5T_C_S1 3420
+#define JH5T_COMPOUND 3430
+#define JH5T_CONV_CONV 3440
+#define JH5T_CONV_FREE 3450
+#define JH5T_CONV_INIT 3460
+#define JH5T_CSET_ASCII 3470
+#define JH5T_CSET_ERROR 3480
+#define JH5T_CSET_UTF8 3490
+#define JH5T_CSET_RESERVED_10 3500
+#define JH5T_CSET_RESERVED_11 3510
+#define JH5T_CSET_RESERVED_12 3520
+#define JH5T_CSET_RESERVED_13 3530
+#define JH5T_CSET_RESERVED_14 3540
+#define JH5T_CSET_RESERVED_15 3550
+#define JH5T_CSET_RESERVED_2 3560
+#define JH5T_CSET_RESERVED_3 3570
+#define JH5T_CSET_RESERVED_4 3580
+#define JH5T_CSET_RESERVED_5 3590
+#define JH5T_CSET_RESERVED_6 3600
+#define JH5T_CSET_RESERVED_7 3610
+#define JH5T_CSET_RESERVED_8 3620
+#define JH5T_CSET_RESERVED_9 3630
+#define JH5T_DIR_ASCEND 3640
+#define JH5T_DIR_DEFAULT 3650
+#define JH5T_DIR_DESCEND 3660
+#define JH5T_ENUM 3670
+#define JH5T_FLOAT 3680
+#define JH5T_FORTRAN_S1 3690
+#define JH5T_IEEE_F32BE 3700
+#define JH5T_IEEE_F32LE 3710
+#define JH5T_IEEE_F64BE 3720
+#define JH5T_IEEE_F64LE 3730
+#define JH5T_INTEGER 3740
+#define JH5T_INTEL_B16 3750
+#define JH5T_INTEL_B32 3760
+#define JH5T_INTEL_B64 3770
+#define JH5T_INTEL_B8 3780
+#define JH5T_INTEL_F32 3790
+#define JH5T_INTEL_F64 3800
+#define JH5T_INTEL_I16 3810
+#define JH5T_INTEL_I32 3820
+#define JH5T_INTEL_I64 3830
+#define JH5T_INTEL_I8 3840
+#define JH5T_INTEL_U16 3850
+#define JH5T_INTEL_U32 3860
+#define JH5T_INTEL_U64 3870
+#define JH5T_INTEL_U8 3880
+#define JH5T_MIPS_B16 3890
+#define JH5T_MIPS_B32 3900
+#define JH5T_MIPS_B64 3910
+#define JH5T_MIPS_B8 3920
+#define JH5T_MIPS_F32 3930
+#define JH5T_MIPS_F64 3940
+#define JH5T_MIPS_I16 3950
+#define JH5T_MIPS_I32 3960
+#define JH5T_MIPS_I64 3970
+#define JH5T_MIPS_I8 3980
+#define JH5T_MIPS_U16 3990
+#define JH5T_MIPS_U32 4000
+#define JH5T_MIPS_U64 4010
+#define JH5T_MIPS_U8 4020
+#define JH5T_NATIVE_B16 4030
+#define JH5T_NATIVE_B32 4040
+#define JH5T_NATIVE_B64 4050
+#define JH5T_NATIVE_B8 4060
+#define JH5T_NATIVE_CHAR 4070
+#define JH5T_NATIVE_DOUBLE 4080
+#define JH5T_NATIVE_FLOAT 4090
+#define JH5T_NATIVE_HADDR 4100
+#define JH5T_NATIVE_HBOOL 4110
+#define JH5T_NATIVE_HERR 4120
+#define JH5T_NATIVE_HSIZE 4130
+#define JH5T_NATIVE_HSSIZE 4140
+#define JH5T_NATIVE_INT 4150
+#define JH5T_NATIVE_INT_FAST16 4160
+#define JH5T_NATIVE_INT_FAST32 4170
+#define JH5T_NATIVE_INT_FAST64 4180
+#define JH5T_NATIVE_INT_FAST8 4190
+#define JH5T_NATIVE_INT_LEAST16 4200
+#define JH5T_NATIVE_INT_LEAST32 4210
+#define JH5T_NATIVE_INT_LEAST64 4220
+#define JH5T_NATIVE_INT_LEAST8 4230
+#define JH5T_NATIVE_INT16 4240
+#define JH5T_NATIVE_INT32 4250
+#define JH5T_NATIVE_INT64 4260
+#define JH5T_NATIVE_INT8 4270
+#define JH5T_NATIVE_LDOUBLE 4280
+#define JH5T_NATIVE_LLONG 4290
+#define JH5T_NATIVE_LONG 4300
+#define JH5T_NATIVE_OPAQUE 4310
+#define JH5T_NATIVE_SCHAR 4320
+#define JH5T_NATIVE_SHORT 4330
+#define JH5T_NATIVE_UCHAR 4340
+#define JH5T_NATIVE_UINT 4350
+#define JH5T_NATIVE_UINT_FAST16 4360
+#define JH5T_NATIVE_UINT_FAST32 4370
+#define JH5T_NATIVE_UINT_FAST64 4380
+#define JH5T_NATIVE_UINT_FAST8 4390
+#define JH5T_NATIVE_UINT_LEAST16 4400
+#define JH5T_NATIVE_UINT_LEAST32 4410
+#define JH5T_NATIVE_UINT_LEAST64 4420
+#define JH5T_NATIVE_UINT_LEAST8 4430
+#define JH5T_NATIVE_UINT16 4440
+#define JH5T_NATIVE_UINT32 4450
+#define JH5T_NATIVE_UINT64 4460
+#define JH5T_NATIVE_UINT8 4470
+#define JH5T_NATIVE_ULLONG 4480
+#define JH5T_NATIVE_ULONG 4490
+#define JH5T_NATIVE_USHORT 4500
+#define JH5T_NCLASSES 4510
+#define JH5T_NO_CLASS 4520
+#define JH5T_NORM_ERROR 4530
+#define JH5T_NORM_IMPLIED 4540
+#define JH5T_NORM_MSBSET 4550
+#define JH5T_NORM_NONE 4560
+#define JH5T_NPAD 4570
+#define JH5T_NSGN 4580
+#define JH5T_OPAQUE 4590
+#define JH5T_OPAQUE_TAG_MAX 4595
+#define JH5T_ORDER_BE 4600
+#define JH5T_ORDER_ERROR 4610
+#define JH5T_ORDER_LE 4620
+#define JH5T_ORDER_NONE 4630
+#define JH5T_ORDER_VAX 4640
+#define JH5T_PAD_BACKGROUND 4650
+#define JH5T_PAD_ERROR 4660
+#define JH5T_PAD_ONE 4670
+#define JH5T_PAD_ZERO 4680
+#define JH5T_PERS_DONTCARE 4690
+#define JH5T_PERS_HARD 4700
+#define JH5T_PERS_SOFT 4710
+#define JH5T_REFERENCE 4720
+#define JH5T_SGN_2 4730
+#define JH5T_SGN_ERROR 4740
+#define JH5T_SGN_NONE 4750
+#define JH5T_STD_B16BE 4760
+#define JH5T_STD_B16LE 4770
+#define JH5T_STD_B32BE 4780
+#define JH5T_STD_B32LE 4790
+#define JH5T_STD_B64BE 4800
+#define JH5T_STD_B64LE 4810
+#define JH5T_STD_B8BE 4820
+#define JH5T_STD_B8LE 4830
+#define JH5T_STD_I16BE 4840
+#define JH5T_STD_I16LE 4850
+#define JH5T_STD_I32BE 4860
+#define JH5T_STD_I32LE 4870
+#define JH5T_STD_I64BE 4880
+#define JH5T_STD_I64LE 4890
+#define JH5T_STD_I8BE 4900
+#define JH5T_STD_I8LE 4910
+#define JH5T_STD_REF_DSETREG 4920
+#define JH5T_STD_REF_OBJ 4930
+#define JH5T_STD_U16BE 4940
+#define JH5T_STD_U16LE 4950
+#define JH5T_STD_U32BE 4960
+#define JH5T_STD_U32LE 4970
+#define JH5T_STD_U64BE 4980
+#define JH5T_STD_U64LE 4990
+#define JH5T_STD_U8BE 5000
+#define JH5T_STD_U8LE 5010
+#define JH5T_STR_ERROR 5020
+#define JH5T_STR_NULLPAD 5030
+#define JH5T_STR_NULLTERM 5040
+#define JH5T_STR_RESERVED_10 5050
+#define JH5T_STR_RESERVED_11 5060
+#define JH5T_STR_RESERVED_12 5070
+#define JH5T_STR_RESERVED_13 5080
+#define JH5T_STR_RESERVED_14 5090
+#define JH5T_STR_RESERVED_15 5100
+#define JH5T_STR_RESERVED_3 5110
+#define JH5T_STR_RESERVED_4 5120
+#define JH5T_STR_RESERVED_5 5130
+#define JH5T_STR_RESERVED_6 5140
+#define JH5T_STR_RESERVED_7 5150
+#define JH5T_STR_RESERVED_8 5160
+#define JH5T_STR_RESERVED_9 5170
+#define JH5T_STR_SPACEPAD 5180
+#define JH5T_STRING 5190
+#define JH5T_TIME 5200
+#define JH5T_UNIX_D32BE 5210
+#define JH5T_UNIX_D32LE 5220
+#define JH5T_UNIX_D64BE 5230
+#define JH5T_UNIX_D64LE 5240
+#define JH5T_VARIABLE 5245
+#define JH5T_VLEN 5250
+#define JH5Z_CB_CONT 5260
+#define JH5Z_CB_ERROR 5270
+#define JH5Z_CB_FAIL 5280
+#define JH5Z_CB_NO 5290
+#define JH5Z_DISABLE_EDC 5300
+#define JH5Z_ENABLE_EDC 5310
+#define JH5Z_ERROR_EDC 5320
+#define JH5Z_FILTER_DEFLATE 5330
+#define JH5Z_FILTER_ERROR 5340
+#define JH5Z_FILTER_FLETCHER32 5350
+#define JH5Z_FILTER_MAX 5360
+#define JH5Z_FILTER_NONE 5370
+#define JH5Z_FILTER_RESERVED 5380
+#define JH5Z_FILTER_SHUFFLE 5390
+#define JH5Z_FILTER_SZIP 5400
+#define JH5Z_FLAG_DEFMASK 5410
+#define JH5Z_FLAG_INVMASK 5420
+#define JH5Z_FLAG_MANDATORY 5430
+#define JH5Z_FLAG_OPTIONAL 5440
+#define JH5Z_FLAG_REVERSE 5450
+#define JH5Z_FLAG_SKIP_EDC 5460
+#define JH5Z_MAX_NFILTERS 5470
+#define JH5Z_NO_EDC 5480
+#define JH5Z_SO_INT 5481
+#define JH5Z_SO_FLOAT_DSCALE 5482
+#define JH5Z_SO_FLOAT_ESCALE 5483
+#define JH5Z_FILTER_CONFIG_ENCODE_ENABLED 5490
+#define JH5Z_FILTER_CONFIG_DECODE_ENABLED 5500
\ No newline at end of file
diff --git a/source/c/jhdf5/h5ImpJHDF5.c b/source/c/jhdf5/h5ImpJHDF5.c
new file mode 100755
index 0000000..325ec24
--- /dev/null
+++ b/source/c/jhdf5/h5ImpJHDF5.c
@@ -0,0 +1,253 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  general library functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
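+
+/*
+ *  Illustrative sketch of the wrapper shape used throughout this file.
+ *  It is not part of the build, and H5Xexample/some_id are hypothetical
+ *  names standing in for a real HDF5 entry point and its argument:
+ *
+ *      JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Xexample
+ *        (JNIEnv *env, jclass clss, jint some_id)
+ *      {
+ *          herr_t retVal = H5Xexample((hid_t)some_id);
+ *          if (retVal < 0) {
+ *              h5libraryError(env);   <-- map the failure to a Java exception
+ *          }
+ *          return (jint)retVal;
+ *      }
+ */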
+
+#include "hdf5.h"
+#include <jni.h>
+/*
+#include <signal.h>
+*/
+
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+extern jboolean h5raiseException( JNIEnv *env, char *exception, char *message);
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5open
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5open
+  (JNIEnv *env, jclass clss)
+{
+    herr_t retVal = -1;
+    retVal =  H5open();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5close
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5close
+  (JNIEnv *env, jclass clss)
+{
+    herr_t retVal = -1;
+    retVal =  H5close();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5dont_atexit
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5dont_1atexit
+  (JNIEnv *env, jclass clss)
+{
+    int retVal = H5dont_atexit();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5get_libversion
+ * Signature: ([I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5get_1libversion
+  (JNIEnv *env, jclass clss, jintArray libversion)
+{
+    unsigned *theArray = NULL;
+    jboolean isCopy;
+    int status;
+
+    if (libversion == NULL) {
+        h5nullArgument( env, "H5get_libversion:  libversion is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (unsigned *)env->GetIntArrayElements(libversion,&isCopy);
+#else
+    theArray = (unsigned *)(*env)->GetIntArrayElements(env,libversion,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5get_libversion:  input not pinned");
+        return -1;
+    }
+
+    status =  H5get_libversion(&(theArray[0]), &(theArray[1]), &(theArray[2]));
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(libversion,(jint *)theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,libversion,(jint *)theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
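+        /* Success: release with mode 0 so the pinned version numbers are
+           copied back into the caller's Java int[] (JNI_ABORT would
+           discard them). */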
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(libversion,(jint *)theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,libversion,(jint *)theArray,0);
+#endif
+    }
+    return (jint)status;
+}
+
+#ifdef notdef
+/*
+ struct sigaction {
+   int sa_flags;
+     void (*sa_handler)();
+     sigset_t sa_mask;
+     void (*sa_sigaction)(int, siginfo_t *, void *);
+};
+int sigaction(int sig, struct sigaction *act, struct sigaction *oact);
+*/
+void catch_abrt()
+{
+    /*  Raise Java exception */
+    printf("raise exception....\n");
+}
+#endif
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5check_version
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5check_1version
+  (JNIEnv *env, jclass clss, jint majnum, jint minnum, jint relnum)
+{
+    int status;
+/*
+ *   In principle, we want to catch the 'abort' signal, and
+ *  do something other than crash.
+ *   Look up how to do this portably.
+ */
+/*
+    int res;
+    struct sigaction ctchit;
+    struct sigaction old;
+    ctchit.sa_handler = catch_abrt;
+*/
+
+/*
+    res = sigaction(SIGABRT, &ctchit, &old);
+    if (res != 0) {
+        printf("sigaction failed\n");
+        return(-1);
+    }
+*/
+    /*  catch the signal? */
+    status = H5check_version((unsigned)majnum, (unsigned)minnum, (unsigned)relnum);
+/*
+    res = sigaction(SIGABRT, &old, 0);
+    if (res != 0) {
+        printf("sigaction failed\n");
+        return(-1);
+    }
+*/
+    return status;
+}
+
+
+/*
+ *  This is the only routine from H5E currently implemented, so
+ *  there is no separate file h5eImp.c
+ */
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Eclear
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Eclear
+  (JNIEnv *env, jclass clss )
+{
+    herr_t res = -1;
+    res = H5Eclear(H5E_DEFAULT) ;
+    if (res < 0) {
+        h5raiseException( env,
+        "ncsa/hdf/hdf5lib/exceptions/HDF5LibraryException",
+        "H5Eclear Failed");
+
+    }
+    return (jint) res;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5garbage_collect
+ * Signature: ()I
+ *
+ *  ** New in HDF5 1.2.2:  if linking with an earlier version
+ *     of HDF5, configure with --enable-hdf5_1_2_1
+ *
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5garbage_1collect
+  (JNIEnv *env, jclass clss)
+{
+    herr_t retVal = -1;
+#ifndef USE_H5_1_2_1
+    retVal =  H5garbage_collect();
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+#endif
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5set_free_list_limits(int reg_global_lim, int reg_list_lim,
+ *                int arr_global_lim, int arr_list_lim, int blk_global_lim,
+ *                int blk_list_lim )
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5set_1free_1list_1limits
+  (JNIEnv *env, jclass clss,jint reg_global_lim, jint reg_list_lim,
+  jint arr_global_lim, jint arr_list_lim, jint blk_global_lim, jint blk_list_lim )
+{
+    int retVal = H5set_free_list_limits((int)reg_global_lim, (int)reg_list_lim,
+        (int)arr_global_lim, (int)arr_list_lim, (int)blk_global_lim, (int)blk_list_lim);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return retVal;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5aImpJHDF5.c b/source/c/jhdf5/h5aImpJHDF5.c
new file mode 100755
index 0000000..16282f2
--- /dev/null
+++ b/source/c/jhdf5/h5aImpJHDF5.c
@@ -0,0 +1,1178 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Attribute API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include "h5utilJHDF5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+herr_t H5AreadVL_str_jhdf5 (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+herr_t H5AreadVL_num_jhdf5 (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+herr_t H5AreadVL_comp_jhdf5 (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf);
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aexists
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aexists
+  (JNIEnv *env, jclass clss, jint obj_id, jstring attribute_name)
+{
+    htri_t exists;
+    char *aName;
+    jboolean isCopy;
+
+    if (attribute_name == NULL) {
+        h5nullArgument( env, "H5Aexists:  attribute_name is NULL");
+        return -1;
+    }
+
+    aName = (char *)(*env)->GetStringUTFChars(env,attribute_name,&isCopy);
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Aexists:  attribute_name not pinned");
+        return -1;
+    }
+
+    exists = H5Aexists( (hid_t) obj_id, aName );
+    if (exists < 0)
+    {
+        h5libraryError(env);
+    }
+
+    (*env)->ReleaseStringUTFChars(env,attribute_name,aName);
+    
+    return exists;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Acreate
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Acreate
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint create_plist_id, jint access_plist_id)
+{
+    herr_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Acreate:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    aName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    aName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (aName == NULL) {
+        h5JNIFatalError( env, "H5Acreate: aName is not pinned");
+        return -1;
+    }
+
+    status = H5Acreate((hid_t)loc_id, aName, (hid_t)type_id,
+        (hid_t)space_id, (hid_t)create_plist_id, (hid_t)access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,aName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,aName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aopen_name
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aopen_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env,"H5Aopen_name:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    aName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    aName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (aName == NULL) {
+        h5JNIFatalError( env,"H5Aopen_name: name is not pinned");
+        return -1;
+    }
+
+    status = H5Aopen_name((hid_t)loc_id, aName);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,aName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,aName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aopen_idx
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aopen_1idx
+  (JNIEnv *env, jclass clss, jint loc_id, jint idx)
+{
+    herr_t retVal = -1;
+    retVal =  H5Aopen_idx((hid_t)loc_id, (unsigned int) idx );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Awrite__II_3B
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetByteArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetByteArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[S)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Awrite__II_3S
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jshortArray buf)
+{
+    herr_t status;
+    jshort *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetShortArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetShortArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseShortArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseShortArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Awrite__II_3I
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jintArray buf)
+{
+    herr_t status;
+    jint *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetIntArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetIntArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseIntArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseIntArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Awrite__II_3J
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jlongArray buf)
+{
+    herr_t status;
+    jlong *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetLongArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetLongArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseLongArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseLongArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[F)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Awrite__II_3F
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jfloatArray buf)
+{
+    herr_t status;
+    jfloat *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetFloatArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetFloatArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseFloatArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseFloatArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Awrite
+ * Signature: (II[D)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Awrite__II_3D
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jdoubleArray buf)
+{
+    herr_t status;
+    jdouble *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Awrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetDoubleArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetDoubleArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Awrite: buf is not pinned");
+        return -1;
+    }
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseDoubleArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseDoubleArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/* 
+ * Class:     ncsa_hdf_hdf5lib_H5 
+ * Method:    H5AwriteString 
+ * Signature: (II[B)I 
+ */ 
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5AwriteString 
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jobjectArray buf) 
+{ 
+    herr_t status; 
+    jboolean isCopy; 
+    char* * wdata; 
+    jsize size; 
+    jint i, j; 
+
+    if ( buf == NULL ) { 
+        h5nullArgument( env, "H5AwriteString:  buf is NULL"); 
+        return -1; 
+    } 
+
+    size = (*env)->GetArrayLength(env, (jarray) buf); 
+    wdata = malloc(size * sizeof (char *)); 
+
+    if (!wdata) { 
+        h5outOfMemory( env, "H5AwriteString:  cannot allocate buffer"); 
+        return -1; 
+    } 
+
+    memset(wdata, 0, size * sizeof(char *)); 
+
+    for (i = 0; i < size; ++i) { 
+        jstring obj = (jstring) (*env)->GetObjectArrayElement(env, (jobjectArray) buf, i); 
+        if (obj != 0) { 
+            jsize length = (*env)->GetStringUTFLength(env, obj); 
+            const char * utf8 = (*env)->GetStringUTFChars(env, obj, 0); 
+                        
+            if (utf8) { 
+                wdata[i] = malloc(strlen(utf8)+1); 
+                if (!wdata[i]) { 
+                    status = -1; 
+                    // can't allocate memory, cleanup 
+                    for (j = 0; j < i; ++j) {
+                        if(wdata[j]) { 
+                            free(wdata[j]); 
+                        } 
+                    } 
+                    free(wdata); 
+
+                    (*env)->ReleaseStringUTFChars(env, obj, utf8); 
+                    (*env)->DeleteLocalRef(env, obj); 
+
+                    h5outOfMemory( env, "H5DwriteString:  cannot allocate buffer"); 
+                    return -1; 
+                } 
+
+                strcpy(wdata[i], utf8); 
+            } 
+
+            (*env)->ReleaseStringUTFChars(env, obj, utf8); 
+            (*env)->DeleteLocalRef(env, obj); 
+        } 
+    } 
+
+    status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, wdata); 
+
+    // now free memory 
+    for (i = 0; i < size; ++i) { 
+        if(wdata[i]) { 
+            free(wdata[i]); 
+        } 
+    } 
+    free(wdata); 
+
+    if (status < 0) { 
+        h5libraryError(env); 
+    } 
+    return (jint)status; 
+} 
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aread__II_3B
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetByteArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetByteArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(buf,byteP,0);
+#else
+        (*env)->ReleaseByteArrayElements(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[S)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aread__II_3S
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jshortArray buf)
+{
+    herr_t status;
+    jshort *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetShortArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetShortArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseShortArrayElements(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseShortArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseShortArrayElements(buf,byteP,0);
+#else
+        (*env)->ReleaseShortArrayElements(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aread__II_3I
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jintArray buf)
+{
+    herr_t status;
+    jint *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetIntArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetIntArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(buf,byteP,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aread__II_3J
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jlongArray buf)
+{
+    herr_t status;
+    jlong *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetLongArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetLongArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,byteP,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[F)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aread__II_3F
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jfloatArray buf)
+{
+    herr_t status;
+    jfloat *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetFloatArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetFloatArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseFloatArrayElements(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseFloatArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseFloatArrayElements(buf,byteP,0);
+#else
+        (*env)->ReleaseFloatArrayElements(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aread
+ * Signature: (II[D)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aread__II_3D
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jdoubleArray buf)
+{
+    herr_t status;
+    jdouble *byteP;
+
+    if (buf == NULL) {
+        h5nullArgument( env,"H5Aread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetDoubleArrayElements(buf,NULL);
+#else
+    byteP = (*env)->GetDoubleArrayElements(env,buf,NULL);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env,"H5Aread: buf is not pinned");
+        return -1;
+    }
+
+    status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(buf,byteP,0);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_space
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aget_1space
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Aget_space((hid_t)attr_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aget_1type
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Aget_type((hid_t)attr_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_name
+ * Signature: (IJLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aget_1name
+  (JNIEnv *env, jclass clss, jint attr_id, jlong buf_size, jobjectArray name)
+{
+    char *aName;
+    jstring str;
+    hssize_t size;
+    long bs;
+
+    bs = (long)buf_size;
+    if (bs == 0)
+    {
+        /* we are only supposed to find out the size */
+        size = H5Aget_name((hid_t)attr_id, 0, NULL);
+        if (size < 0) {
+            h5libraryError(env);
+            return -1;
+        } else
+        {
+            return size;
+        }
+    }
+    if (bs <= 0) {
+        h5badArgument( env, "H5Aget_name:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Aget_name:  malloc failed");
+        return -1;
+    }
+    size = H5Aget_name((hid_t)attr_id, (size_t)buf_size, aName);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;
+    }
+    /* successful return -- save the string; */
+#ifdef __cplusplus
+    str = env->NewStringUTF(aName);
+#else
+    str = (*env)->NewStringUTF(env,aName);
+#endif
+    if (str == NULL) {
+        free(aName);
+        h5JNIFatalError( env,"H5Aget_name:  return string failed");
+        return -1;
+    }
+    free(aName);
+    /*  Note: throws ArrayIndexOutOfBoundsException,
+        ArrayStoreException */
+#ifdef __cplusplus
+    env->SetObjectArrayElement(name,0,str);
+#else
+    (*env)->SetObjectArrayElement(env,name,0,str);
+#endif
+
+    return (jlong)size;
+}
+
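+/*
+ * Illustrative sketch (not compiled into this file): the wrapper above
+ * supports the usual two-call pattern -- query the name length with a
+ * zero-sized buffer, then allocate and fetch.  'attr_id' is assumed to be
+ * a valid open attribute identifier; error handling is elided.
+ */
+#if 0
+static char *get_attribute_name(hid_t attr_id)
+{
+    ssize_t len = H5Aget_name(attr_id, 0, NULL);   /* length without '\0' */
+    char *name = (len < 0) ? NULL : (char *) malloc((size_t) len + 1);
+    if (name != NULL)
+        H5Aget_name(attr_id, (size_t) len + 1, name);
+    return name;   /* caller frees */
+}
+#endif
+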
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aget_num_attrs
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aget_1num_1attrs
+  (JNIEnv *env, jclass clss, jint loc_id)
+{
+    int retVal = -1;
+    retVal =  H5Aget_num_attrs((hid_t)loc_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Adelete
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Adelete
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* aName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env,"H5Adelete:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    aName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    aName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (aName == NULL) {
+        h5JNIFatalError( env,"H5Adelete: name is not pinned");
+        return -1;
+    }
+
+    status = H5Adelete((hid_t)loc_id, aName );
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,aName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,aName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Aclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Aclose
+  (JNIEnv *env, jclass clss, jint attr_id)
+{
+    herr_t retVal = 0;
+
+    if (attr_id > 0)
+        retVal =  H5Aclose((hid_t)attr_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5AreadVL
+ * Signature: (II[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5AreadVL
+  (JNIEnv *env, jclass clss, jint attr_id, jint mem_type_id, jobjectArray buf)
+{
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5AreadVL:  buf is NULL");
+        return -1;
+    }
+
+    if (H5Tis_variable_str((hid_t)mem_type_id) > 0)
+    {
+        return (jint) H5AreadVL_str_jhdf5 (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else if (H5Tget_class((hid_t)mem_type_id) == H5T_COMPOUND)
+    {
+        return (jint) H5AreadVL_comp_jhdf5 (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+    else
+    {
+        return (jint) H5AreadVL_num_jhdf5 (env, (hid_t)attr_id, (hid_t)mem_type_id, buf);
+    }
+}
+
+herr_t H5AreadVL_num_jhdf5 (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t status;
+    int i, n;
+    size_t max_len=0;
+    h5str_t h5str;
+    jstring jstr;
+    hvl_t *rdata;
+    size_t size;
+    hid_t sid;
+    hsize_t dims[H5S_MAX_RANK];
+
+    n = (*env)->GetArrayLength(env, buf);
+    rdata = (hvl_t *)calloc(n, sizeof(hvl_t));
+    if (rdata == NULL) {
+        h5outOfMemory( env, "H5AreadVL:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Aread(aid, tid, rdata);
+    dims[0] = n;
+    sid = H5Screate_simple(1, dims, NULL);
+
+    if (status < 0) {
+        H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+        H5Sclose(sid);
+        free(rdata);
+        h5libraryError(env);
+        return -1;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        if ((rdata+i)->len > max_len)
+            max_len = (rdata+i)->len;
+    }
+
+    /* Size the string buffer for the longest sequence read, as in
+       H5DreadVL_num_jhdf5 (at least one element). */
+    size = H5Tget_size(tid) * (max_len > 0 ? max_len : 1);
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new_jhdf5(&h5str, 4*size);
+
+    if (h5str.s == NULL)
+    {
+        H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+        H5Sclose(sid);
+        free(rdata);
+        h5JNIFatalError( env, "H5AreadVL:  failed to allocate string buf");
+        return -1;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        h5str.s[0] = '\0';
+        h5str_sprintf_jhdf5(&h5str, aid, tid, rdata+i);
+        jstr = (*env)->NewStringUTF(env, h5str.s);
+        (*env)->SetObjectArrayElement(env, buf, i, jstr);
+    }
+
+    h5str_free_jhdf5(&h5str); 
+    H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, rdata);
+    H5Sclose(sid);
+
+    if (rdata)
+        free(rdata);
+
+    return status;
+}
+
+herr_t H5AreadVL_comp_jhdf5 (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t status;
+    int i, n;
+    h5str_t h5str;
+    jstring jstr;
+    char *rdata;
+    size_t size;
+
+    size = H5Tget_size(tid);
+    n = (*env)->GetArrayLength(env, buf);
+    rdata = (char *)malloc(n*size);
+
+    if (rdata == NULL) {
+        h5outOfMemory( env, "H5AreadVL:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Aread(aid, tid, rdata);
+
+    if (status < 0) {
+        free(rdata);
+        h5libraryError(env);
+        return -1;
+    }
+
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new_jhdf5(&h5str, 4*size);
+
+    if (h5str.s == NULL)
+    {
+        free(rdata);
+        h5outOfMemory( env, "H5AreadVL:  failed to allocate string buf");
+        return -1;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        h5str.s[0] = '\0';
+        h5str_sprintf_jhdf5(&h5str, aid, tid, rdata+i*size);
+        jstr = (*env)->NewStringUTF(env, h5str.s);
+        (*env)->SetObjectArrayElement(env, buf, i, jstr);
+    }
+
+    h5str_free_jhdf5(&h5str); 
+    free(rdata);
+
+    return status;
+}
+
+herr_t H5AreadVL_str_jhdf5 (JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf)
+{
+    herr_t status=-1;
+    jstring jstr;
+    char **strs;
+    int i, n;
+
+    n = (*env)->GetArrayLength(env, buf);
+    strs =(char **)calloc(n, sizeof(char *));
+
+    if (strs == NULL)
+    {
+        h5outOfMemory( env, "H5AreadVL:  failed to allocate buff for read variable length strings");
+        return -1;
+    }
+
+    status = H5Aread(aid, tid, strs);
+    if (status < 0) {
+        for (i=0; i<n; i++)
+        {
+            if (strs[i] != NULL)
+            {
+                free(strs[i]);
+            }
+        }
+        free(strs);
+        h5libraryError(env);
+        return -1;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        jstr = (*env)->NewStringUTF(env, strs[i]);
+        (*env)->SetObjectArrayElement(env, buf, i, jstr);
+        free(strs[i]);
+    }
+        
+    free(strs);
+
+    return status;
+}
+
+/*
+ * Copies the content of one attribute to another attribute
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Acopy
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Acopy
+  (JNIEnv *env, jclass clss, jint src_id, jint dst_id)
+{
+    jbyte *buf;
+    herr_t retVal = -1;
+    hid_t src_did = (hid_t)src_id;
+    hid_t dst_did = (hid_t)dst_id;
+    hid_t tid=-1;
+    hid_t sid=-1;
+    hsize_t total_size = 0;
+
+
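+    /* Assumes the destination attribute has the same datatype size and
+       dataspace extent as the source: the transfer buffer below is sized
+       from the source attribute only. */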
+    sid = H5Aget_space(src_did);
+    if (sid < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    tid = H5Aget_type(src_did);
+    if (tid < 0) {
+        H5Sclose(sid);
+        h5libraryError(env);
+        return -1;
+    }
+
+    total_size = H5Sget_simple_extent_npoints(sid) * H5Tget_size(tid);
+
+    H5Sclose(sid);
+
+    buf = (jbyte *)malloc( (int) (total_size * sizeof(jbyte)));
+    if (buf == NULL) {
+        H5Tclose(tid);
+        h5outOfMemory( env, "H5Acopy:  malloc failed");
+        return -1;
+    }
+
+    retVal = H5Aread(src_did, tid, buf);
+    H5Tclose(tid);
+
+    if (retVal < 0) {
+        free(buf);
+        h5libraryError(env);
+        return (jint)retVal;
+    }
+
+    tid = H5Aget_type(dst_did);
+    if (tid < 0) {
+        free(buf);
+        h5libraryError(env);
+        return -1;
+    }
+    retVal = H5Awrite(dst_did, tid, buf);
+    H5Tclose(tid);
+    free(buf);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5dImpJHDF5.c b/source/c/jhdf5/h5dImpJHDF5.c
new file mode 100755
index 0000000..7d887dc
--- /dev/null
+++ b/source/c/jhdf5/h5dImpJHDF5.c
@@ -0,0 +1,1507 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Dataset Object API Functions of HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include "h5utilJHDF5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+herr_t H5DreadVL_str_jhdf5 (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+herr_t H5DreadVL_num_jhdf5 (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dcreate
+ * Signature: (ILjava/lang/String;IIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dcreate
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id,
+  jint space_id, jint link_create_plist_id, jint dset_create_plist_id, 
+  jint dset_access_plist_id)
+{
+    hid_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Dcreate:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Dcreate:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Dcreate((hid_t)loc_id, file, (hid_t)type_id, 
+        (hid_t)space_id, (hid_t)link_create_plist_id, 
+    (hid_t) dset_create_plist_id, (hid_t)dset_access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dchdir_ext
+ * Signature: (Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dchdir_1ext
+  (JNIEnv *env, jclass clss, jstring dir_name)
+{
+    int status;
+    char* file;
+    jboolean isCopy;
+
+    if (dir_name == NULL) {
+        h5nullArgument( env, "H5Dchdir_ext:  dir_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(dir_name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,dir_name,&isCopy);
+#endif
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Dchdir_ext:  file dir not pinned");
+        return -1;
+    }
+    status = chdir ( file );
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(dir_name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,dir_name,file);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dgetdir_ext
+ * Signature: ([Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dgetdir_1ext
+  (JNIEnv *env, jclass clss, jobjectArray dir_name, jint buf_size)
+{
+    char *aName;
+    jstring str;
+
+    if (buf_size <= 0) {
+        h5badArgument( env, "H5Dgetcwd:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*buf_size);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Dgetcwd:  malloc failed");
+        return -1;
+    }
+    if (getcwd( (char *)aName, (size_t)buf_size) == NULL) {
+        free(aName);
+        h5JNIFatalError( env, "H5Dgetdir_ext:  getcwd failed");
+        return -1;
+    }
+
+    str = (*env)->NewStringUTF(env,aName);
+
+    if (str == NULL) {
+        free(aName);
+        h5outOfMemory( env,"H5Dgetcwd:  return string failed");
+        return -1;
+    }
+    free(aName);
+
+    (*env)->SetObjectArrayElement(env,dir_name,0,str);
+
+    return 0;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dopen
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dopen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist_id)
+{
+    hid_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Dopen:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Dopen:  file name not pinned");
+        return -1;
+    }
+    status = H5Dopen((hid_t)loc_id, file, (hid_t) access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_space
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dget_1space
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Dget_space((hid_t)dataset_id );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dget_1type
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = -1;
+    retVal = H5Dget_type((hid_t)dataset_id );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dget_1create_1plist
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Dget_create_plist((hid_t)dataset_id );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread
+ * Signature: (IIIII[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    byteP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,byteP,JNI_ABORT);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,byteP,0);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dwrite
+ * Signature: (IIIII[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dwrite
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    byteP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    byteP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, byteP);
+
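+    /* JNI_ABORT: the native side only read from the buffer, so no changes
+       need to be copied back into the Java array. */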
+#ifdef __cplusplus
+    env->ReleasePrimitiveArrayCritical(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleasePrimitiveArrayCritical(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dextend
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dextend
+  (JNIEnv *env, jclass clss, jint dataset_id, jbyteArray size)
+{
+    herr_t status;
+    jbyte *P;
+    jboolean isCopy;
+    hsize_t *sa;
+    int i;
+    int rank;
+    hsize_t *lp;
+    jlong *jlp;
+
+    if ( size == NULL ) {
+        h5nullArgument( env, "H5Dextend:  array of sizes is NULL");
+        return -1;
+    }
+    /*
+     *  Future:  check that the array has correct
+     *           rank (same as dataset dataset_id)
+     */
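+    /* By convention the byte array packs 'rank' jlong values (one per
+       dimension) in the JVM's native byte order; they are widened to
+       hsize_t element by element below. */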
+#ifdef __cplusplus
+    P = env->GetByteArrayElements(size,&isCopy);
+#else
+    P = (*env)->GetByteArrayElements(env,size,&isCopy);
+#endif
+    if (P == NULL) {
+        h5JNIFatalError( env, "H5Dextend:  array not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    i = env->GetArrayLength(size);
+#else
+    i = (*env)->GetArrayLength(env,size);
+#endif
+    rank = i / sizeof(jlong);
+    sa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (sa == NULL)  {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(size,P,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,size,P,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Dextend:  size not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)P;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+
+    status = H5Dextend((hid_t)dataset_id, (hsize_t *)sa);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(size,P,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,size,P,JNI_ABORT);
+#endif
+    free(sa);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dset_extent
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dset_1extent
+  (JNIEnv *env, jclass clss, jint dataset_id, jbyteArray size)
+{
+    herr_t status;
+    jbyte *P;
+    jboolean isCopy;
+    hsize_t *sa;
+    int i;
+    int rank;
+    hsize_t *lp;
+    jlong *jlp;
+
+    if ( size == NULL ) {
+        h5nullArgument( env, "H5Dextend:  array of sizes is NULL");
+        return -1;
+    }
+    /*
+     *  Future:  check that the array has correct
+     *           rank (same as dataset dataset_id)
+     */
+#ifdef __cplusplus
+    P = env->GetByteArrayElements(size,&isCopy);
+#else
+    P = (*env)->GetByteArrayElements(env,size,&isCopy);
+#endif
+    if (P == NULL) {
+        h5JNIFatalError( env, "H5Dextend:  array not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    i = env->GetArrayLength(size);
+#else
+    i = (*env)->GetArrayLength(env,size);
+#endif
+    rank = i / sizeof(jlong);
+    sa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (sa == NULL)  {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(size,P,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,size,P,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Dextend:  size not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)P;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+
+    status = H5Dset_extent((hid_t)dataset_id, (hsize_t *)sa);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(size,P,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,size,P,JNI_ABORT);
+#endif
+    free(sa);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dclose
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hid_t retVal = 0;
+
+    if (dataset_id > 0)
+        retVal =  H5Dclose((hid_t)dataset_id );
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_storage_size
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dget_1storage_1size
+  (JNIEnv *env, jclass clss, jint dataset_id)
+{
+    hsize_t retVal = (hsize_t)-1;
+    retVal =  H5Dget_storage_size((hid_t)dataset_id );
+/* probably returns '0' if fails--don't do an exception
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+*/
+    return (jlong)retVal;
+}
+
+/*
+ * Copies the content of one dataset to another dataset
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dcopy
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dcopy
+  (JNIEnv *env, jclass clss, jint src_id, jint dst_id)
+{
+    jbyte *buf;
+    herr_t retVal = -1;
+    hid_t src_did = (hid_t)src_id;
+    hid_t dst_did = (hid_t)dst_id;
+    hid_t tid=-1;
+    hid_t sid=-1;
+    hsize_t total_size = 0;
+
+
+    sid = H5Dget_space(src_did);
+    if (sid < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    tid = H5Dget_type(src_did);
+    if (tid < 0) {
+        H5Sclose(sid);
+        h5libraryError(env);
+        return -1;
+    }
+
+    total_size = H5Sget_simple_extent_npoints(sid) *
+            H5Tget_size(tid);
+
+    H5Sclose(sid);
+
+    buf = (jbyte *)calloc( (int) (total_size), sizeof(jbyte));
+    if (buf == NULL) {
+        H5Tclose(tid);
+        h5outOfMemory( env, "H5Dcopy:  malloc failed");
+        return -1;
+    }
+
+    retVal = H5Dread(src_did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+    H5Tclose(tid);
+
+    if (retVal < 0) {
+        free(buf);
+        h5libraryError(env);
+        return (jint)retVal;
+    }
+
+    tid = H5Dget_type(dst_did);
+    if (tid < 0) {
+        free(buf);
+        h5libraryError(env);
+        return -1;
+    }
+    retVal = H5Dwrite(dst_did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+    H5Tclose(tid);
+    free(buf);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+
+/*
+ * Determines the number of bytes required to store the VL data of the dataset
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_get_buf_size
+ * Signature: (III[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dvlen_1get_1buf_1size
+  (JNIEnv *env, jclass clss, jint dataset_id, jint type_id, jint space_id,
+  jintArray size)
+{
+    herr_t status;
+    jint *P;
+    jboolean isCopy;
+    hsize_t sz;
+
+    if ( size == NULL ) {
+        h5nullArgument( env, "H5Dvlen_get_buf_size:  size is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    P = env->GetIntArrayElements(size,&isCopy);
+#else
+    P = (*env)->GetIntArrayElements(env,size,&isCopy);
+#endif
+    if (P == NULL) {
+        h5JNIFatalError( env, "H5Dvlen_get_buf_size:  array not pinned");
+        return -1;
+    }
+
+    status = (jint)H5Dvlen_get_buf_size((hid_t) dataset_id,
+        (hid_t) type_id, (hid_t) space_id, (hsize_t *)&sz);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(size,P,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,size,P,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        P[0] = (jint)sz;
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(size,P,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,size,P,0);
+#endif
+    }
+    return (jint)status;
+}
+
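+/*
+ * Illustrative sketch (not compiled into this file): how a native caller
+ * might use H5Dvlen_get_buf_size to budget memory before reading VL data.
+ * 'did', 'tid' and 'sid' are assumed to be valid open identifiers.
+ */
+#if 0
+static jlong vl_bytes_needed(hid_t did, hid_t tid, hid_t sid)
+{
+    hsize_t nbytes = 0;
+    if (H5Dvlen_get_buf_size(did, tid, sid, &nbytes) < 0)
+        return -1;                 /* library error */
+    return (jlong) nbytes;         /* bytes the VL data will occupy */
+}
+#endif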
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dvlen_reclaim
+ * Signature: (III[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dvlen_1reclaim
+  (JNIEnv *env, jclass clss, jint type_id, jint space_id,
+   jint xfer_plist_id, jbyteArray buf)
+{
+    herr_t status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    byteP = env->GetByteArrayElements(buf,&isCopy);
+#else
+    byteP = (*env)->GetByteArrayElements(env,buf,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dvlen_reclaim((hid_t)type_id,
+        (hid_t)space_id, (hid_t)xfer_plist_id, byteP);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(buf,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,buf,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/***************************************************************
+ *                   New APIs for HDF5.1.6                     *
+ ***************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dget_space_status(hid_t dset_id, H5D_space_status_t *status)
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dget_1space_1status
+  (JNIEnv *env, jclass clss, jint dset_id, jintArray status)
+{
+    herr_t retVal = -1;
+    jint *theArray;
+    jboolean isCopy;
+    H5D_space_status_t space_status;
+
+
+    if (status == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Dget_space_status:  status is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jint *)env->GetIntArrayElements(status,&isCopy);
+#else
+    theArray = (jint *)(*env)->GetIntArrayElements(env,status,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Dget_space_status:  status not pinned");
+        return -1;
+    }
+
+    retVal =  H5Dget_space_status((hid_t)dset_id, &space_status );
+
+    if (retVal < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(status,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,status,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = space_status;
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(status,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,status,theArray,0);
+#endif
+    }
+
+    return (jint)retVal;
+}
+
+
+/*
+    ////////////////////////////////////////////////////////////////////
+    //                                                                //
+    //         New APIs for read data from library                    //
+    //  Using H5Dread(..., Object buf) requires function calls        //
+    //  theArray.emptyBytes() and theArray.arrayify( buf), which      //
+    //  triples the actual memory needed by the data set.             //
+    //  Using the following APIs solves the problem.                  //
+    //                                                                //
+    ////////////////////////////////////////////////////////////////////
+*/
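+/*
+ * Illustrative sketch (not compiled into this file): the pin/unpin pattern
+ * the typed readers below share.  GetPrimitiveArrayCritical usually pins
+ * the Java array in place instead of copying it, so the HDF5 read lands
+ * directly in the caller's array; JNI_ABORT on failure discards any copy.
+ */
+#if 0
+static herr_t read_into_java_array(JNIEnv *env, hid_t did, hid_t tid, jintArray buf)
+{
+    jboolean isCopy;
+    herr_t status;
+    jint *p = (*env)->GetPrimitiveArrayCritical(env, buf, &isCopy);
+    if (p == NULL)
+        return -1;
+    status = H5Dread(did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, p);
+    (*env)->ReleasePrimitiveArrayCritical(env, buf, p, status < 0 ? JNI_ABORT : 0);
+    return status;
+}
+#endif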
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Dread_short
+ * Signature: (IIIII[S)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread__IIIII_3S
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jshortArray buf)
+{
+    herr_t status;
+    jshort *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,0);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread__IIIII_3I
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jintArray buf)
+{
+    herr_t status;
+    jint *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,0);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread__IIIII_3J
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jlongArray buf)
+{
+    herr_t status;
+    jlong *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,0);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread__IIIII_3F
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jfloatArray buf)
+{
+    herr_t status;
+    jfloat *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,0);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread__IIIII_3D
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jdoubleArray buf)
+{
+    herr_t status;
+    jdouble *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dread:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dread:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleasePrimitiveArrayCritical(buf,buffP,0);
+#else
+        (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread_1string
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jobjectArray j_buf)
+{
+    herr_t status;
+    jstring jstr;
+    char *c_buf;
+    char *cstr;
+    char cterm;
+    size_t str_len, i, n;
+    
+    c_buf = cstr = NULL;
+    if ( j_buf == NULL) {
+        h5nullArgument( env, "H5Dread_string:  buf is NULL");
+        return -1;
+    }
+
+    n = (*env)->GetArrayLength(env, j_buf);
+    if ( n<=0) {
+        h5nullArgument( env, "H5Dread_string:  buf length <=0");
+        return -1;
+    }
+
+    if ( (str_len = H5Tget_size((hid_t)mem_type_id)) <= 0 ) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    /* Note: we need one additional character at the end of the buffer for the final '\0'. */
+    if ( (c_buf = (char *)calloc(n + 1, str_len)) == NULL) {
+        h5outOfMemory(env,  "H5Dread_string: memory allocation failed.");
+        return -1;
+    }
+
+    status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, c_buf);
+
+    if (status < 0) {
+        if (c_buf) free (c_buf); c_buf = NULL;
+        h5libraryError(env);
+        return -1;
+    }
+
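+    /* The buffer holds n fixed-width strings of str_len bytes each, not
+       necessarily NUL-terminated.  Temporarily drop a '\0' just past each
+       string so NewStringUTF sees a terminated string, then restore the
+       overwritten byte before advancing. */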
+    cstr = c_buf;
+    for (i=0; i<n; i++) {
+        cterm = *(cstr + str_len);
+        *(cstr + str_len) = '\0'; 
+        jstr = (*env)->NewStringUTF(env, cstr);
+        (*env)->SetObjectArrayElement(env, j_buf, i, jstr);
+        *(cstr + str_len) = cterm; 
+        cstr += str_len; 
+    }
+
+    free(c_buf);
+
+    return (jint)status;
+}
+
+
+/**
+ *  Read VLEN data into array of arrays.
+ *  Object[] buf contains VL arrays of data points
+ *  Currently only deal with variable length of atomic data types
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5DreadVL
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jobjectArray buf)
+{
+    htri_t isStr=0;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5DreadVL:  buf is NULL");
+        return -1;
+    }
+
+    if (H5Tget_class((hid_t)mem_type_id) == H5T_COMPOUND) {
+        /* Drill down to the first member's base type, closing each
+           intermediate datatype id returned by H5Tget_member_type. */
+        hid_t nested_tid = H5Tget_member_type((hid_t)mem_type_id, 0);
+        while (H5Tget_class(nested_tid) == H5T_COMPOUND)
+        {
+            hid_t member_tid = H5Tget_member_type(nested_tid, 0);
+            H5Tclose(nested_tid);
+            nested_tid = member_tid;
+        }
+        isStr = H5Tis_variable_str(nested_tid);
+        H5Tclose(nested_tid);
+    }
+    else {
+        isStr = H5Tis_variable_str((hid_t)mem_type_id);
+    }
+
+
+    if (isStr > 0)
+    {
+        return (jint) H5DreadVL_str_jhdf5 (env, (hid_t)dataset_id, (hid_t)mem_type_id, 
+            (hid_t) mem_space_id,(hid_t) file_space_id, (hid_t)xfer_plist_id, buf);
+    }
+    else if (isStr == 0)
+    {
+        return (jint) H5DreadVL_num_jhdf5 (env, (hid_t)dataset_id, (hid_t)mem_type_id, 
+            (hid_t) mem_space_id,(hid_t) file_space_id, (hid_t)xfer_plist_id, buf);
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+herr_t H5DreadVL_num_jhdf5 (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf)
+{
+    herr_t status;
+    int i, n;
+    size_t max_len=0;
+    h5str_t h5str;
+    jstring jstr;
+    hvl_t *rdata;
+    size_t size;
+
+    n = (*env)->GetArrayLength(env, buf);
+
+    rdata = (hvl_t *)calloc(n, sizeof(hvl_t));
+    if (rdata == NULL) {
+        h5outOfMemory( env, "H5DreadVL:  failed to allocate buff for read");
+        return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, rdata);
+
+    if (status < 0) {
+        H5Dvlen_reclaim(tid, mem_sid, H5P_DEFAULT, rdata);
+        free(rdata);
+        h5libraryError(env);
+        return -1;
+    }
+
+    max_len = 1;
+    for (i=0; i<n; i++)
+    {
+        if ((rdata+i)->len > max_len)
+            max_len = (rdata+i)->len;
+    }
+
+    size = H5Tget_size(tid)*max_len;
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new_jhdf5(&h5str, 4*size);
+
+    if (h5str.s == NULL)
+    {
+        H5Dvlen_reclaim(tid, mem_sid, H5P_DEFAULT, rdata);
+        free(rdata);
+        h5outOfMemory( env, "H5DreadVL:  failed to allocate string buf");
+        return -1;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        h5str.s[0] = '\0';
+        h5str_sprintf_jhdf5(&h5str, did, tid, rdata+i);
+        jstr = (*env)->NewStringUTF(env, h5str.s);
+        (*env)->SetObjectArrayElement(env, buf, i, jstr);
+    }
+
+    h5str_free_jhdf5(&h5str); 
+    H5Dvlen_reclaim(tid, mem_sid, H5P_DEFAULT, rdata);
+    free(rdata);
+
+    return status;
+}
+
+herr_t H5DreadVL_str_jhdf5 (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf)
+{
+    herr_t status=-1;
+    jstring jstr;
+    char **strs;
+    int i, n;
+
+    n = (*env)->GetArrayLength(env, buf);
+    strs = (char **) calloc(n, sizeof(char *));
+
+    if (strs == NULL)
+    {
+        h5outOfMemory( env, "H5DreadVL:  failed to allocate buff for read variable length strings");
+        return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, strs);
+
+    if (status < 0) {
+        H5Dvlen_reclaim(tid, mem_sid, H5P_DEFAULT, strs);
+        free(strs);
+        h5libraryError(env);
+        return -1;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        jstr = (*env)->NewStringUTF(env, strs[i]);
+        (*env)->SetObjectArrayElement(env, buf, i, jstr);
+    }
+        
+    H5Dvlen_reclaim(tid, mem_sid, H5P_DEFAULT, strs);
+    free(strs);
+
+    return status;
+}
+
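+/*
+ * Reads dataset-region references from a dataset and renders each reference
+ * as a string (via h5str_sprintf_jhdf5) into the caller's String[] buffer.
+ */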
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dread_1reg_1ref (JNIEnv *env, jclass clss, 
+    jint dataset_id, jint mem_type_id, jint mem_space_id,
+    jint file_space_id, jint xfer_plist_id, jobjectArray buf)
+{
+    herr_t status;
+    int i, n;
+    h5str_t h5str;
+    jstring jstr;
+
+    hdset_reg_ref_t *ref_data;
+    size_t size;
+
+    hid_t did = (hid_t) dataset_id;
+    hid_t tid = (hid_t) mem_type_id;
+    hid_t mem_sid = (hid_t) mem_space_id;
+    hid_t file_sid = (hid_t) file_space_id;
+
+    n = (*env)->GetArrayLength(env, buf);
+    size = sizeof(hdset_reg_ref_t); /*H5Tget_size(tid);*/
+    ref_data = calloc(n, size);
+
+    if (ref_data == NULL) {
+       h5outOfMemory( env, "H5Dread_reg_ref:  failed to allocate buff for read");
+       return -1;
+    }
+
+    status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, ref_data);
+
+    if (status < 0) {
+        free(ref_data);
+        h5libraryError(env);
+        return -1;
+    }
+
+    memset(&h5str, 0, sizeof(h5str_t));
+    h5str_new_jhdf5(&h5str, 1024);
+    for (i=0; i<n; i++)
+    {
+        h5str.s[0] = '\0';
+        h5str_sprintf_jhdf5(&h5str, did, tid, ref_data[i]);
+        jstr = (*env)->NewStringUTF(env, h5str.s);
+
+        (*env)->SetObjectArrayElement(env, buf, i, jstr);
+    }
+
+    h5str_free_jhdf5(&h5str); 
+    free(ref_data);
+
+    return status;
+}
+
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dwrite__IIIII_3S
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jshortArray buf)
+{
+    herr_t status;
+    jshort *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+#ifdef __cplusplus
+    env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+    (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dwrite__IIIII_3I
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jintArray buf)
+{
+    herr_t status;
+    jint *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+#ifdef __cplusplus
+    env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+    (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dwrite__IIIII_3J
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jlongArray buf)
+{
+    herr_t status;
+    jlong *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+#ifdef __cplusplus
+    env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+    (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dwrite__IIIII_3F
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jfloatArray buf)
+{
+    herr_t status;
+    jfloat *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+#ifdef __cplusplus
+    env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+    (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Dwrite__IIIII_3D
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id,
+  jint file_space_id, jint xfer_plist_id, jdoubleArray buf)
+{
+    herr_t status;
+    jdouble *buffP;
+    jboolean isCopy;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Dwrite:  buf is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    buffP = env->GetPrimitiveArrayCritical(buf,&isCopy);
+#else
+    buffP = (*env)->GetPrimitiveArrayCritical(env,buf,&isCopy);
+#endif
+    if (buffP == NULL) {
+        h5JNIFatalError( env, "H5Dwrite:  buf not pinned");
+        return -1;
+    }
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+#ifdef __cplusplus
+    env->ReleasePrimitiveArrayCritical(buf,buffP,JNI_ABORT);
+#else
+    (*env)->ReleasePrimitiveArrayCritical(env,buf,buffP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+// Rosetta Biosoftware 
+/* 
+ * Class:     ncsa_hdf_hdf5lib_H5 
+ * Method:    H5DwriteString 
+ * Signature: (IIIII[Ljava/lang/String;)I 
+ */ 
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5DwriteString 
+  (JNIEnv *env, jclass clss, jint dataset_id, jint mem_type_id, jint mem_space_id, 
+  jint file_space_id, jint xfer_plist_id, jobjectArray buf) 
+{ 
+    herr_t status; 
+    jboolean isCopy; 
+    char* * wdata; 
+    jsize size; 
+    jint i, j; 
+
+    if ( buf == NULL ) { 
+        h5nullArgument( env, "H5DwriteString:  buf is NULL"); 
+        return -1; 
+    } 
+
+    size = (*env)->GetArrayLength(env, (jarray) buf); 
+    wdata = malloc(size * sizeof (char *)); 
+
+    if (!wdata) { 
+        h5outOfMemory( env, "H5DwriteString:  cannot allocate buffer"); 
+        return -1; 
+    } 
+
+    memset(wdata, 0, size * sizeof(char *)); 
+
+    for (i = 0; i < size; ++i) { 
+        jstring obj = (jstring) (*env)->GetObjectArrayElement(env, (jobjectArray) buf, i); 
+        if (obj != 0) { 
+            const char * utf8 = (*env)->GetStringUTFChars(env, obj, 0);
+
+            if (utf8) {
+                wdata[i] = malloc(strlen(utf8)+1);
+                if (!wdata[i]) {
+                    status = -1;
+                    // can't allocate memory, clean up everything allocated so far
+                    for (j = 0; j < i; ++j) {
+                        if (wdata[j]) {
+                            free(wdata[j]);
+                        }
+                    }
+                    free(wdata); 
+
+                    (*env)->ReleaseStringUTFChars(env, obj, utf8); 
+                    (*env)->DeleteLocalRef(env, obj); 
+
+                    h5outOfMemory( env, "H5DwriteString:  cannot allocate buffer"); 
+                    return -1; 
+                } 
+
+                strcpy(wdata[i], utf8); 
+            } 
+
+            (*env)->ReleaseStringUTFChars(env, obj, utf8); 
+            (*env)->DeleteLocalRef(env, obj); 
+        } 
+    } 
+
+    status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, 
+        (hid_t)file_space_id, (hid_t)xfer_plist_id, wdata); 
+
+    // now free memory 
+    for (i = 0; i < size; ++i) { 
+        if(wdata[i]) { 
+            free(wdata[i]); 
+        } 
+    } 
+    free(wdata); 
+
+    if (status < 0) { 
+        h5libraryError(env); 
+    } 
+    return (jint)status; 
+} 
+
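+/*
+ * Illustrative sketch (not compiled into this file): H5DwriteString expects
+ * 'mem_type_id' to be a variable-length string datatype, since the native
+ * buffer it assembles is an array of NUL-terminated char pointers.  A caller
+ * would typically build such a type like this (error handling elided):
+ */
+#if 0
+static hid_t make_vl_string_type(void)
+{
+    hid_t tid = H5Tcopy(H5T_C_S1);      /* start from the C string type */
+    H5Tset_size(tid, H5T_VARIABLE);     /* make the length variable */
+    return tid;                         /* caller closes with H5Tclose */
+}
+#endif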
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5fImpJHDF5.c b/source/c/jhdf5/h5fImpJHDF5.c
new file mode 100755
index 0000000..edcd66c
--- /dev/null
+++ b/source/c/jhdf5/h5fImpJHDF5.c
@@ -0,0 +1,556 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  file interface functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fopen
+ * Signature: (Ljava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fopen
+  (JNIEnv *env, jclass clss, jstring name, jint flags, jint access_id)
+{
+    hid_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fopen:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fopen:  file name not pinned");
+        return -1;
+    }
+    status = H5Fopen(file, (unsigned) flags, (hid_t) access_id );
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fcreate
+ * Signature: (Ljava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fcreate
+  (JNIEnv *env, jclass clss, jstring name, jint flags, jint create_id, jint access_id)
+{
+    hid_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fcreate:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fcreate:  file name is not pinned");
+        return -1;
+    }
+
+    status = H5Fcreate(file, flags, create_id, access_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fflush
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fflush
+  (JNIEnv *env, jclass clss, jint object_id, jint scope)
+{
+    herr_t retVal = -1;
+    retVal =  H5Fflush((hid_t) object_id, (H5F_scope_t) scope );
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fis_hdf5
+ * Signature: (Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fis_1hdf5
+  (JNIEnv *env, jclass clss, jstring name)
+{
+    htri_t retVal = 0;
+    char * file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fis_hdf5:  name is NULL");
+        return JNI_FALSE;
+    }
+
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fis_hdf5:  file name is not pinned");
+        return JNI_FALSE;
+    }
+
+    retVal = H5Fis_hdf5(file);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+
+    if (retVal > 0) {
+        return JNI_TRUE;
+    } else if (retVal == 0) {
+        return JNI_FALSE;
+    } else {
+        /*  raise exception here -- return value is irrelevant */
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_create_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1create_1plist
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Fget_create_plist((hid_t) file_id );
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_access_plist
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1access_1plist
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Fget_access_plist((hid_t) file_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fclose
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    herr_t status = 0;
+
+    if (file_id > 0)
+        status = H5Fclose((hid_t) file_id );
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fmount
+ * Signature: (ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fmount
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint child_id, jint plist_id)
+{
+    herr_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Fmount:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        /* exception -- out of memory? */
+        h5JNIFatalError( env, "H5Fmount:  file name is not pinned");
+        return -1;
+    }
+
+    status = H5Fmount((hid_t) loc_id, file, (hid_t) child_id, (hid_t) plist_id );
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Funmount
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Funmount
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* file;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        /* exception -- bad argument? */
+        h5nullArgument( env, "H5Funmount:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Funmount:  file name is not pinned");
+        /* exception -- out of memory? */
+        return -1;
+    }
+
+    status = H5Funmount((hid_t) loc_id, file );
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Freopen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Freopen
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Freopen((hid_t)file_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_ids(hid_t file_id, unsigned int types, int maxObjs, hid_t *obj_id_list )
+ * Signature: (III[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1obj_1ids
+  (JNIEnv *env, jclass clss, jint file_id, jint types, jint obj_count, jintArray obj_id_list)
+{
+    herr_t status=-1;
+    jint *obj_id_listP;
+    jboolean isCopy;
+
+    if ( obj_id_list == NULL ) {
+        h5nullArgument( env, "H5Fget_obj_ids:  obj_id_list is NULL");
+        return -1;
+    }
+
+    obj_id_listP = (*env)->GetIntArrayElements(env,obj_id_list,&isCopy);
+    if (obj_id_listP == NULL) {
+        h5JNIFatalError( env, "H5Fget_obj_ids:  obj_id_list not pinned");
+        return -1;
+    }
+
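+    /* Note: the pinned jint buffer is passed directly as hid_t*, which
+       assumes sizeof(hid_t) == sizeof(jint); the disabled variant below
+       copies through a temporary hid_t array instead. */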
+    status = H5Fget_obj_ids((hid_t)file_id, (unsigned int)types, (int)obj_count, (hid_t*)obj_id_listP);
+
+    if (status < 0) {
+        (*env)->ReleaseIntArrayElements(env,obj_id_list,obj_id_listP,JNI_ABORT);
+        h5libraryError(env);
+    } else  {
+        (*env)->ReleaseIntArrayElements(env,obj_id_list,obj_id_listP,0);
+    }
+
+    return (jint)status;
+}
+
+/* bug on 64-bit machines 
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1obj_1ids
+  (JNIEnv *env, jclass clss, jint file_id, jint types, jint maxObjs, jintArray obj_id_list)
+{
+    herr_t status;
+    jint *obj_id_listP;
+    jboolean isCopy;
+    hid_t *id_list;
+    int rank;
+    int i;
+
+    status = -1;
+
+    if ( obj_id_list == NULL ) {
+        h5nullArgument( env, "H5Fget_obj_ids:  obj_id_list is NULL");
+        return -1;
+    }
+
+
+#ifdef __cplusplus
+    obj_id_listP = env->GetIntArrayElements(obj_id_list,&isCopy);
+#else
+    obj_id_listP = (*env)->GetIntArrayElements(env,obj_id_list,&isCopy);
+#endif
+    if (obj_id_listP == NULL) {
+        h5JNIFatalError( env, "H5Fget_obj_ids:  obj_id_list not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    rank = (int)env->GetArrayLength(obj_id_list);
+#else
+    rank = (int)(*env)->GetArrayLength(env,obj_id_list);
+#endif
+
+    id_list = (hid_t *)malloc( rank * sizeof(hid_t));
+
+    if (id_list == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(obj_id_list,obj_id_listP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,obj_id_list,obj_id_listP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Fget_obj_ids:  obj_id_list not converted to hid_t");
+        return -1;
+    }
+
+    status = H5Fget_obj_ids((hid_t)file_id, (unsigned int)types, (int)maxObjs, id_list);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(obj_id_list,obj_id_listP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,obj_id_list,obj_id_listP,JNI_ABORT);
+#endif
+        free(id_list);
+        h5libraryError(env);
+    } else  {
+        for (i = 0; i < rank; i++) {
+            obj_id_listP[i] = id_list[i];
+        }
+        free(id_list);
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(obj_id_list,obj_id_listP,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,obj_id_list,obj_id_listP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+*/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Fget_obj_count(hid_t file_id, unsigned int types )
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1obj_1count
+  (JNIEnv *env, jclass clss, jint file_id, jint types )
+{
+    herr_t status;
+
+    status = -1;
+
+    status = H5Fget_obj_count((hid_t)file_id, (unsigned int)types );
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: ssize_t H5Fget_name (hid_t obj_id, char *name, size_t size)
+ * Purpose:   Retrieves the name of the file to which the object obj_id belongs
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1name
+  (JNIEnv *env, jclass clss, jint obj_id, jstring name, jint buf_size)
+{
+    char *aName;
+    jstring str;
+    ssize_t size;
+
+    if (buf_size <= 0) {
+        h5badArgument( env, "H5Fget_name:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*buf_size);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Fget_name:  malloc failed");
+        return -1;
+    }
+    size = H5Fget_name ((hid_t) obj_id, (char *)aName, (size_t)buf_size);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;  /* buffer already freed; do not fall through */
+    }
+    /* successful return -- save the string; */
+
+#ifdef __cplusplus
+    str = env->NewStringUTF(aName);
+#else
+    str = (*env)->NewStringUTF(env,aName);
+#endif
+    if (str == NULL) {
+        free(aName);
+        h5outOfMemory( env,"H5Fget_name:  return string failed");
+        return -1;
+    }
+    free(aName);
+    /*  Note: throws ArrayIndexOutOfBoundsException,
+        ArrayStoreException */
+#ifdef __cplusplus
+    env->SetObjectArrayElement(name,0,str);
+#else
+    (*env)->SetObjectArrayElement(env,name,0,str);
+#endif
+
+    return (jlong)size;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Fget_filesize (hid_t file_id, hsize_t * size)
+ * Purpose:   Returns the current size in bytes of the file
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Fget_1filesize
+  (JNIEnv *env, jclass clss, jint file_id)
+{
+    herr_t status;
+    hsize_t size = 0;
+
+    status = H5Fget_filesize ((hid_t) file_id, (hsize_t *) &size);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong) size;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5gImpJHDF5.c b/source/c/jhdf5/h5gImpJHDF5.c
new file mode 100755
index 0000000..2d779df
--- /dev/null
+++ b/source/c/jhdf5/h5gImpJHDF5.c
@@ -0,0 +1,1087 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Group Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
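+
+/*
+ *  Every wrapper below follows the same pin/call/release pattern.  The
+ *  following sketch is illustrative only (H5Gexample is not a real entry
+ *  point); it shows the C build of the pattern, using the helper externs
+ *  declared further down:
+ *
+ *      JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gexample
+ *        (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+ *      {
+ *          herr_t status;
+ *          char *cName;
+ *          jboolean isCopy;
+ *
+ *          if (name == NULL) {
+ *              h5nullArgument(env, "H5Gexample:  name is NULL");
+ *              return -1;
+ *          }
+ *          cName = (char *)(*env)->GetStringUTFChars(env, name, &isCopy);
+ *          if (cName == NULL) {
+ *              h5JNIFatalError(env, "H5Gexample:  name not pinned");
+ *              return -1;
+ *          }
+ *          status = H5Gexample((hid_t)loc_id, cName);
+ *          (*env)->ReleaseStringUTFChars(env, name, cName);
+ *          if (status < 0) {
+ *              h5libraryError(env);
+ *          }
+ *          return (jint)status;
+ *      }
+ */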
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+/* missing definitions from hdf5.h */
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE (!FALSE)
+#endif
+
+/* delete TRUE and FALSE when fixed in HDF5 */
+
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef __cplusplus
+herr_t obj_info_all(hid_t loc_id, const char *name, void *opdata);
+herr_t H5Gget_obj_info_all( JNIEnv *env, hid_t loc_id, char *group_name, char **objname, int *type );
+#else
+static herr_t obj_info_all(hid_t loc_id, const char *name, void *opdata);
+static herr_t H5Gget_obj_info_all( JNIEnv *env, hid_t loc_id, char *group_name, char **objname, int *type );
+#endif
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+typedef struct info_all
+{
+    JNIEnv *env;
+    char **objname;
+    int *type;
+    int count;
+} info_all_t;
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gcreate
+ * Signature: (ILjava/lang/String;III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gcreate
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, 
+  jint link_create_plist_id, jint group_create_plist_id, 
+  jint group_access_plist_id)
+{
+    hid_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gcreate:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gcreate:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Gcreate((hid_t)loc_id, gName, (hid_t)link_create_plist_id,
+        (hid_t)group_create_plist_id, (hid_t)group_access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gopen
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gopen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist_id)
+{
+    hid_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gopen:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gopen:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Gopen((hid_t)loc_id, gName, (hid_t) access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gclose
+  (JNIEnv *env, jclass clss, jint group_id)
+{
+    herr_t retVal = 0;
+
+    if (group_id > 0)
+        retVal =  H5Gclose((hid_t)group_id) ;
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Glink
+ * Signature: (IILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Glink
+  (JNIEnv *env, jclass clss, jint loc_id, jint link_type, jstring
+    current_name, jstring new_name)
+{
+    herr_t status;
+    char *cName, *nName;
+    jboolean isCopy;
+
+    if (current_name == NULL) {
+        h5nullArgument( env, "H5Glink:  current_name is NULL");
+        return -1;
+    }
+    if (new_name == NULL) {
+        h5nullArgument( env, "H5Glink:  new_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    cName = (char *)env->GetStringUTFChars(current_name,&isCopy);
+#else
+    cName = (char *)(*env)->GetStringUTFChars(env,current_name,&isCopy);
+#endif
+    if (cName == NULL) {
+        h5JNIFatalError( env, "H5Glink:  current_name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    nName = (char *)env->GetStringUTFChars(new_name,&isCopy);
+#else
+    nName = (char *)(*env)->GetStringUTFChars(env,new_name,&isCopy);
+#endif
+    if (nName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(current_name,cName);
+#else
+        (*env)->ReleaseStringUTFChars(env,current_name,cName);
+#endif
+        h5JNIFatalError( env, "H5Glink:  new_name not pinned");
+        return -1;
+    }
+
+    status = H5Glink((hid_t)loc_id, (H5G_link_t)link_type, cName, nName);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(new_name,nName);
+    env->ReleaseStringUTFChars(current_name,cName);
+#else
+    (*env)->ReleaseStringUTFChars(env,new_name,nName);
+    (*env)->ReleaseStringUTFChars(env,current_name,cName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Glink2
+ * Signature: (ILjava/lang/String;IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Glink2
+  (JNIEnv *env, jclass clss, 
+    jint current_loc_id, jstring current_name, jint link_type, 
+    jint new_loc_id, jstring new_name)
+{
+    herr_t status;
+    char *cName, *nName;
+    jboolean isCopy;
+
+    if (current_name == NULL) {
+        h5nullArgument( env, "H5Glink2:  current_name is NULL");
+        return -1;
+    }
+    if (new_name == NULL) {
+        h5nullArgument( env, "H5Glink2:  new_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    cName = (char *)env->GetStringUTFChars(current_name,&isCopy);
+#else
+    cName = (char *)(*env)->GetStringUTFChars(env,current_name,&isCopy);
+#endif
+    if (cName == NULL) {
+        h5JNIFatalError( env, "H5Glink2:  current_name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    nName = (char *)env->GetStringUTFChars(new_name,&isCopy);
+#else
+    nName = (char *)(*env)->GetStringUTFChars(env,new_name,&isCopy);
+#endif
+    if (nName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(current_name,cName);
+#else
+        (*env)->ReleaseStringUTFChars(env,current_name,cName);
+#endif
+        h5JNIFatalError( env, "H5Glink2:  new_name not pinned");
+        return -1;
+    }
+
+    status = H5Glink2((hid_t)current_loc_id, cName, (H5G_link_t)link_type, (hid_t)new_loc_id, nName);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(new_name,nName);
+    env->ReleaseStringUTFChars(current_name,cName);
+#else
+    (*env)->ReleaseStringUTFChars(env,new_name,nName);
+    (*env)->ReleaseStringUTFChars(env,current_name,cName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gunlink
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gunlink
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name)
+{
+    herr_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gunlink:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gunlink:  name not pinned");
+        return -1;
+    }
+
+    status = H5Gunlink((hid_t)loc_id, gName );
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gmove
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gmove
+  (JNIEnv *env, jclass clss, jint loc_id, jstring src, jstring dst)
+{
+    herr_t status;
+    char *sName, *dName;
+    jboolean isCopy;
+
+    if (src == NULL) {
+        h5nullArgument( env, "H5Gmove:  src is NULL");
+        return -1;
+    }
+    if (dst == NULL) {
+        h5nullArgument( env, "H5Gmove:  dst is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    sName = (char *)env->GetStringUTFChars(src,&isCopy);
+#else
+    sName = (char *)(*env)->GetStringUTFChars(env,src,&isCopy);
+#endif
+    if (sName == NULL) {
+        h5JNIFatalError( env, "H5Gmove:  src not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    dName = (char *)env->GetStringUTFChars(dst,&isCopy);
+#else
+    dName = (char *)(*env)->GetStringUTFChars(env,dst,&isCopy);
+#endif
+    if (dName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(src,sName);
+#else
+        (*env)->ReleaseStringUTFChars(env,src,sName);
+#endif
+        h5JNIFatalError( env, "H5Gmove:  dst not pinned");
+        return -1;
+    }
+
+    status = H5Gmove((hid_t)loc_id, sName, dName );
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(dst,dName);
+    env->ReleaseStringUTFChars(src,sName);
+#else
+    (*env)->ReleaseStringUTFChars(env,dst,dName);
+    (*env)->ReleaseStringUTFChars(env,src,sName);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objinfo
+ * Signature: (ILjava/lang/String;Z[J[J[I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1objinfo
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jboolean follow_link,
+  jlongArray fileno, jlongArray objno, jintArray link_info, jlongArray mtime)
+{
+    char* gName;
+    jboolean isCopy;
+    herr_t retVal;
+    jint *linkInfo;
+    jlong *fileInfo, *objInfo, *timeInfo;
+    hbool_t follow;
+    H5G_stat_t h5gInfo;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  name is NULL");
+        return -1;
+    }
+    if (follow_link == JNI_TRUE) {
+        follow = TRUE;  /*  HDF5 'TRUE' */
+    } else if (follow_link == JNI_FALSE) {
+        follow = FALSE;  /*  HDF5 'FALSE' */
+    } else {
+        h5badArgument( env, "H5Gget_objinfo:  follow_link is invalid");
+        return -1;  /* 'follow' would otherwise be used uninitialized */
+    }
+    if (fileno == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  fileno is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(fileno) < 2) {
+        h5badArgument( env, "H5Gget_objinfo:  fileno input array < 2");
+        return -1;
+    }
+#else
+    if ((*env)->GetArrayLength(env, fileno) < 2) {
+        h5badArgument( env, "H5Gget_objinfo:  fileno input array < 2");
+        return -1;
+    }
+#endif
+    if (objno == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  objno is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(objno) < 2) {
+        h5badArgument( env, "H5Gget_objinfo:  objno input array < 2");
+        return -1;
+    }
+#else
+    if ((*env)->GetArrayLength(env, objno) < 2) {
+        h5badArgument( env, "H5Gget_objinfo:  objno input array < 2");
+        return -1;
+    }
+#endif
+    if (link_info == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  link_info is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(link_info) < 3) {
+        h5badArgument( env, "H5Gget_objinfo:  link_info input array < 3");
+        return -1;
+    }
+#else
+    if ((*env)->GetArrayLength(env, link_info) < 3) {
+        h5badArgument( env, "H5Gget_objinfo:  link_info input array < 3");
+        return -1;
+    }
+#endif
+    if (mtime == NULL) {
+        h5nullArgument( env, "H5Gget_objinfo:  mtime is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gget_object:  name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    fileInfo = (jlong *)env->GetLongArrayElements(fileno,&isCopy);
+#else
+    fileInfo = (jlong *)(*env)->GetLongArrayElements(env,fileno,&isCopy);
+#endif
+    if (fileInfo == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+        h5JNIFatalError( env, "H5Gget_object:  fileno not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    objInfo = (jlong *)env->GetLongArrayElements(objno,&isCopy);
+#else
+    objInfo = (jlong *)(*env)->GetLongArrayElements(env,objno,&isCopy);
+#endif
+    if (objInfo == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(fileno,fileInfo,JNI_ABORT);
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseLongArrayElements(env,fileno,fileInfo,JNI_ABORT);
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+        h5JNIFatalError( env, "H5Gget_object:  objno not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    linkInfo = (jint *)env->GetIntArrayElements(link_info,&isCopy);
+#else
+    linkInfo = (jint *)(*env)->GetIntArrayElements(env,link_info,&isCopy);
+#endif
+    if (linkInfo == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(objno,objInfo,JNI_ABORT);
+        env->ReleaseLongArrayElements(fileno,fileInfo,JNI_ABORT);
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseLongArrayElements(env,objno,objInfo,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,fileno,fileInfo,JNI_ABORT);
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+        h5JNIFatalError( env, "H5Gget_object:  link_info not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    timeInfo = (jlong *)env->GetLongArrayElements(mtime,&isCopy);
+#else
+    timeInfo = (jlong *)(*env)->GetLongArrayElements(env,mtime,&isCopy);
+#endif
+    if (timeInfo == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(link_info,linkInfo,JNI_ABORT);
+        env->ReleaseLongArrayElements(objno,objInfo,JNI_ABORT);
+        env->ReleaseLongArrayElements(fileno,fileInfo,JNI_ABORT);
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseIntArrayElements(env,link_info,linkInfo,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,objno,objInfo,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,fileno,fileInfo,JNI_ABORT);
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+        h5JNIFatalError( env, "H5Gget_object:  mtime not pinned");
+        return -1;
+    }
+
+    retVal = H5Gget_objinfo((hid_t)loc_id, gName, follow, &h5gInfo);
+
+    if (retVal < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(mtime,timeInfo,JNI_ABORT);
+        env->ReleaseLongArrayElements(objno,objInfo,JNI_ABORT);
+        env->ReleaseLongArrayElements(fileno,fileInfo,JNI_ABORT);
+        env->ReleaseIntArrayElements(link_info,linkInfo,JNI_ABORT);
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseLongArrayElements(env,mtime,timeInfo,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,objno,objInfo,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,fileno,fileInfo,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,link_info,linkInfo,JNI_ABORT);
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+        h5libraryError(env);
+    } else {
+        fileInfo[0] = (jlong)h5gInfo.fileno[0];
+        fileInfo[1] = (jlong)h5gInfo.fileno[1];
+        objInfo[0] = (jlong)h5gInfo.objno[0];
+        objInfo[1] = (jlong)h5gInfo.objno[1];
+        timeInfo[0] = (jlong)h5gInfo.mtime;
+        linkInfo[0] = (jint)h5gInfo.nlink;
+        linkInfo[1] = (jint)h5gInfo.type;
+        linkInfo[2] = (jint)h5gInfo.linklen;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(mtime,timeInfo,0);
+        env->ReleaseLongArrayElements(objno,objInfo,0);
+        env->ReleaseLongArrayElements(fileno,fileInfo,0);
+        env->ReleaseIntArrayElements(link_info,linkInfo,0);
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseLongArrayElements(env,mtime,timeInfo,0);
+        (*env)->ReleaseLongArrayElements(env,objno,objInfo,0);
+        (*env)->ReleaseLongArrayElements(env,fileno,fileInfo,0);
+        (*env)->ReleaseIntArrayElements(env,link_info,linkInfo,0);
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_linkval
+ * Signature: (ILjava/lang/String;I[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1linkval
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint size, jobjectArray value)
+{
+    char* gName;
+    jboolean isCopy;
+    char *lValue;
+    jstring str;
+    herr_t status;
+
+    if (size < 0) {
+        h5badArgument( env, "H5Gget_linkval:  size < 0");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_linkval:  name is NULL");
+        return -1;
+    }
+    lValue = (char *) malloc(sizeof(char)*size);
+    if (lValue == NULL) {
+        h5outOfMemory( env, "H5Gget_linkval:  malloc failed ");
+        return -1;
+    }
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (gName == NULL) {
+        free(lValue);
+        h5JNIFatalError( env, "H5Gget_linkval:  name not pinned");
+        return -1;
+    }
+
+    status = H5Gget_linkval((hid_t)loc_id, gName, (size_t)size, lValue);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+    if (status >= 0)
+    {
+        /* may throw OutOfMemoryError */
+#ifdef __cplusplus
+        str = env->NewStringUTF(lValue);
+#else
+        str = (*env)->NewStringUTF(env,lValue);
+#endif
+        if (str == NULL) {
+            /* exception -- fatal JNI error */
+            free(lValue);
+            h5outOfMemory( env, "H5Gget_linkval:  return string not created");
+            return -1;
+        }
+        /*  the SetObjectArrayElement may raise exceptions... */
+#ifdef __cplusplus
+        env->SetObjectArrayElement(value,0,(jobject)str);
+#else
+        (*env)->SetObjectArrayElement(env,value,0,(jobject)str);
+#endif
+        free(lValue);
+    } else {
+        free(lValue);
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gset_comment
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gset_1comment
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jstring comment)
+{
+    herr_t status;
+    char *gName, *gComment;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gset_comment:  name is NULL");
+        return -1;
+    }
+    if (comment == NULL) {
+        h5nullArgument( env, "H5Gset_comment:  comment is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gset_comment:  name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    gComment = (char *)env->GetStringUTFChars(comment,&isCopy);
+#else
+    gComment = (char *)(*env)->GetStringUTFChars(env,comment,&isCopy);
+#endif
+    if (gComment == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(name,gName);
+#else
+        (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+        h5JNIFatalError( env, "H5Gset_comment:  comment not pinned");
+        return -1;
+    }
+
+    status = H5Gset_comment((hid_t)loc_id, gName, gComment);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(comment,gComment);
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,comment,gComment);
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_comment
+ * Signature: (ILjava/lang/String;I[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1comment
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint bufsize,
+  jobjectArray comment)
+{
+    char* gName;
+    jboolean isCopy;
+    char *gComment;
+    jstring str;
+    jint status;
+
+    if (bufsize <= 0) {
+        h5badArgument( env, "H5Gget_comment:  bufsize <= 0");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument( env, "H5Gget_comment:  name is NULL");
+        return -1;
+    }
+    if (comment == NULL) {
+        h5nullArgument( env, "H5Gget_comment:  comment is NULL");
+        return -1;
+    }
+    gComment = (char *)malloc(sizeof(char)*bufsize);
+    if (gComment == NULL) {
+        /* exception -- out of memory */
+        h5outOfMemory( env, "H5Gget_comment:  malloc failed");
+        return -1;
+    }
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (gName == NULL) {
+        free(gComment);
+        h5JNIFatalError( env, "H5Gget_comment:  name not pinned");
+        return -1;
+    }
+    status = H5Gget_comment((hid_t)loc_id, gName, (size_t)bufsize, gComment);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+    if (status >= 0)
+    {
+        /*  may throw OutOfMemoryError */
+#ifdef __cplusplus
+        str = env->NewStringUTF(gComment);
+#else
+        str = (*env)->NewStringUTF(env,gComment);
+#endif
+        if (str == NULL) {
+            free(gComment);
+            h5outOfMemory( env, "H5Gget_comment:  return string not allocated");
+            return -1;
+        }
+        /*  The SetObjectArrayElement may raise exceptions */
+#ifdef __cplusplus
+        env->SetObjectArrayElement(comment,0,(jobject)str);
+#else
+        (*env)->SetObjectArrayElement(env,comment,0,(jobject)str);
+#endif
+        free(gComment);
+    } else {
+        free(gComment);
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+
+/***************************************************************
+ *                   New APIs for HDF5.1.6                     *
+ ***************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_num_objs
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1num_1objs
+  (JNIEnv *env, jclass clss, jint loc_id, jlongArray num_obj)
+{
+    int status;
+    jlong *num_objP;
+    jboolean isCopy;
+    hsize_t *num_obja;
+    int i;
+    int rank;
+
+    if (num_obj == NULL) {
+        h5nullArgument( env, "H5Gget_num_objs:  num_obj is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    num_objP = env->GetLongArrayElements(num_obj,&isCopy);
+#else
+    num_objP = (*env)->GetLongArrayElements(env,num_obj,&isCopy);
+#endif
+    if (num_objP == NULL) {
+        h5JNIFatalError(env,  "H5Gget_num_objs:  num_obj not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    rank = (int) env->GetArrayLength(num_obj);
+#else
+    rank = (int) (*env)->GetArrayLength(env,num_obj);
+#endif
+    num_obja = (hsize_t *)malloc( rank * sizeof(hsize_t));
+    if (num_obja == NULL)  {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(num_obj,num_objP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,num_obj,num_objP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Gget_num_objs:  num_obj not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Gget_num_objs(loc_id, (hsize_t *)num_obja);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(num_obj,num_objP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,num_obj,num_objP,JNI_ABORT);
+#endif
+        free(num_obja);
+        h5libraryError(env);
+    } else {
+        for (i = 0; i < rank; i++) {
+            num_objP[i] = num_obja[i];
+        }
+        free(num_obja);
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(num_obj,num_objP,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,num_obj,num_objP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objname_by_idx(hid_t group_id, hsize_t idx, char *name, size_t* size )
+ * Signature: (IJ[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1objname_1by_1idx
+  (JNIEnv *env, jclass clss, jint group_id, jlong idx, jobjectArray name, jlong buf_size)
+{
+    char *aName;
+    jstring str;
+    hssize_t size;
+    long bs;
+
+    bs = (long)buf_size;
+    if (bs <= 0) {
+        h5badArgument( env, "H5Gget_objname_by_idx:  buf_size <= 0");
+        return -1;
+    }
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Gget_objname_by_idx:  malloc failed");
+        return -1;
+    }
+    size = H5Gget_objname_by_idx((hid_t)group_id, (hsize_t)idx, aName, (size_t)buf_size);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;  /* buffer already freed; do not fall through */
+    }
+    /* successful return -- save the string; */
+#ifdef __cplusplus
+    str = env->NewStringUTF(aName);
+#else
+    str = (*env)->NewStringUTF(env,aName);
+#endif
+    if (str == NULL) {
+        free(aName);
+        h5outOfMemory( env,"H5Gget_objname_by_idx:  return string failed");
+        return -1;
+    }
+    free(aName);
+    /*  Note: throws ArrayIndexOutOfBoundsException,
+        ArrayStoreException */
+#ifdef __cplusplus
+    env->SetObjectArrayElement(name,0,str);
+#else
+    (*env)->SetObjectArrayElement(env,name,0,str);
+#endif
+
+    return (jlong)size;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_objtype_by_idx(hid_t group_id, hsize_t idx )
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1objtype_1by_1idx
+  (JNIEnv *env, jclass clss, jint group_id, jlong idx)
+{
+    int type;
+
+    type = H5Gget_objtype_by_idx((hid_t)group_id, (hsize_t)idx );
+    if (type < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)type;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_nlinks, implemented via H5Gget_info(hid_t group_id, H5G_info_t *group_info)
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1nlinks
+  (JNIEnv *env, jclass clss, jint group_id)
+{
+    H5G_info_t group_info;
+    int status;
+
+    status = H5Gget_info((hid_t)group_id, &group_info );
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return group_info.nlinks;
+}
+
+/*
+/////////////////////////////////////////////////////////////////////////////////
+//
+// These methods are added so that we don't need to call H5Gget_objtype_by_idx
+// in a loop to get information for all the objects in a group, which takes
+// a long time when a group holds more than about 10,000 objects (see the
+// sketch below).
+//
+/////////////////////////////////////////////////////////////////////////////////
+*/
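+
+/*
+ *  Sketch (illustrative, using the helper and callback defined below): the
+ *  batch retrieval reduces to a single H5Giterate pass whose callback fills
+ *  parallel name/type arrays, so Java makes one JNI call per group:
+ *
+ *      info_all_t info = { env, oName, (int *)tarr, 0 };
+ *      if (H5Giterate(loc_id, group_name, NULL, obj_info_all, (void *)&info) < 0)
+ *          return -1;
+ */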
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Gget_obj_info_all
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Gget_1obj_1info_1all
+  (JNIEnv *env, jclass clss, jint loc_id, jstring group_name,
+    jobjectArray objName, jintArray oType, jint n)
+{
+    herr_t status;
+    char *gName=NULL;
+    char **oName=NULL;
+    jboolean isCopy;
+    jstring str;
+    jint *tarr;
+    int i;
+
+    if (group_name == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_all:  group_name is NULL");
+        return -1;
+    }
+
+    if (oType == NULL) {
+        h5nullArgument( env, "H5Gget_obj_info_all:  oType is NULL");
+        return -1;
+    }
+
+    gName = (char *)(*env)->GetStringUTFChars(env,group_name,&isCopy);
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Gget_obj_info_all:  group_name not pinned");
+        return -1;
+    }
+
+    tarr = (*env)->GetIntArrayElements(env,oType,&isCopy);
+    if (tarr == NULL) {
+        (*env)->ReleaseStringUTFChars(env,group_name,gName);
+        h5JNIFatalError( env, "H5Gget_obj_info_all:  type not pinned");
+        return -1;
+    }
+
+    oName = malloc(n * sizeof (*oName));
+    if (oName == NULL) {
+        (*env)->ReleaseStringUTFChars(env,group_name,gName);
+        (*env)->ReleaseIntArrayElements(env,oType,tarr,0);
+        h5outOfMemory(env, "H5Gget_obj_info_all:  malloc failed");
+        return -1;
+    }
+    status = H5Gget_obj_info_all( env, (hid_t) loc_id, gName,  oName, (int *)tarr );
+
+    (*env)->ReleaseStringUTFChars(env,group_name,gName);
+    if (status < 0) {
+        (*env)->ReleaseIntArrayElements(env,oType,tarr,JNI_ABORT);
+        h5str_array_free_jhdf5(oName, n);
+        h5libraryError(env);
+    } else {
+        (*env)->ReleaseIntArrayElements(env,oType,tarr,0);
+
+        for (i=0; i<n; i++) {
+            if (*(oName+i)) {
+                str = (*env)->NewStringUTF(env,*(oName+i));
+                (*env)->SetObjectArrayElement(env,objName,i,(jobject)str);
+            }
+        } /* for (i=0; i<n; i++)*/
+        h5str_array_free_jhdf5(oName, n);
+    }
+
+    return (jint)status;
+
+}
+
+herr_t H5Gget_obj_info_all( JNIEnv *env, hid_t loc_id, char *group_name, char **objname, int *type )
+{
+    info_all_t info;
+    info.env = env;
+    info.objname = objname;
+    info.type = type;
+    info.count = 0;
+
+    if(H5Giterate(loc_id, group_name, NULL, obj_info_all, (void *)&info)<0)
+        return -1;
+
+    return 0;
+}
+
+herr_t obj_info_all(hid_t loc_id, const char *name, void *opdata)
+{
+    H5O_info_t ib;
+    info_all_t* info = (info_all_t*)opdata;
+    
+    if ( H5Oget_info_by_name(loc_id, name, &ib, H5P_DEFAULT) < 0 )
+    {
+        *(info->type+info->count) = -1;
+        *(info->objname+info->count) = NULL;
+    } else {
+        *(info->type+info->count) = ib.type;
+        *(info->objname+info->count) = (char *) malloc(strlen(name)+1);
+        if (*(info->objname+info->count) == NULL)
+        {
+            h5outOfMemory(info->env, "H5Gget_obj_info_all:  malloc failed");
+            return -1;
+        }
+        strcpy(*(info->objname+info->count), name);
+    }
+    info->count++;
+
+    return 0;
+}
+
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5iImpJHDF5.c b/source/c/jhdf5/h5iImpJHDF5.c
new file mode 100755
index 0000000..cc1a094
--- /dev/null
+++ b/source/c/jhdf5/h5iImpJHDF5.c
@@ -0,0 +1,188 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Identifier API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5libraryError( JNIEnv *env );
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Iget_1type
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    H5I_type_t retVal = H5I_BADID;
+    retVal =  H5Iget_type((hid_t)obj_id);
+    if (retVal == H5I_BADID) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.2 versus release 1.6.1          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Iget_name(hid_t obj_id, char *name, size_t size )
+ * Signature: (I[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Iget_1name
+  (JNIEnv *env, jclass clss, jint obj_id, jobjectArray name, jlong buf_size)
+{
+    char *aName;
+    jstring str;
+    hssize_t size;
+    size_t bs;
+
+    if (buf_size <= 0) {
+        h5badArgument( env, "H5Iget_name:  buf_size <= 0");
+        return -1;
+    }
+    bs = (size_t)buf_size;
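+    /* allocate one extra byte for the terminating NUL */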
+    aName = (char*)malloc(sizeof(char)*(bs+1));
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Iget_name:  malloc failed");
+        return -1;
+    }
+    size = H5Iget_name((hid_t)obj_id, aName, bs);
+    if (size < 0) {
+        free(aName);
+        h5libraryError(env);
+        return -1;
+    }
+    /* successful return -- save the string; */
+#ifdef __cplusplus
+    str = env->NewStringUTF(aName);
+#else
+    str = (*env)->NewStringUTF(env,aName);
+#endif
+    if (str == NULL) {
+        free(aName);
+        h5outOfMemory( env,"H5Iget_name:  return string failed");
+        return -1;
+    }
+    free(aName);
+    /*  Note: throws ArrayIndexOutOfBoundsException,
+        ArrayStoreException */
+#ifdef __cplusplus
+    env->SetObjectArrayElement(name,0,str);
+#else
+    (*env)->SetObjectArrayElement(env,name,0,str);
+#endif
+
+    return (jlong)size;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: int H5Iget_ref(hid_t obj_id)
+ * Purpose:   Retrieves the reference count for an object
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Iget_1ref
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    int retVal = -1;
+    retVal = H5Iget_ref( (hid_t)obj_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: int H5Iinc_ref(hid_t obj_id)
+ * Purpose:   Increments the reference count for an object
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Iinc_1ref
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    int retVal = -1;
+    retVal = H5Iinc_ref( (hid_t)obj_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: int H5Idec_ref(hid_t obj_id)
+ * Purpose:   Decrements the reference count for an object
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Idec_1ref
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    int retVal = -1;
+    retVal = H5Idec_ref( (hid_t)obj_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
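+
+/*
+ *  Usage sketch (illustrative): increments and decrements must stay
+ *  balanced, since HDF5 frees an identifier once its count reaches zero:
+ *
+ *      H5Iinc_ref(obj_id);    -- keep obj_id alive across a handoff
+ *      H5Idec_ref(obj_id);    -- matching release
+ */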
+
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature:  hid_t H5Iget_file_id (hid_t obj_id)
+ * Purpose:   Returns an identifier for the file containing the object obj_id
+ */
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Iget_1file_1id
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    hid_t file_id = 0;
+
+    file_id = H5Iget_file_id ((hid_t) obj_id);
+
+    if (file_id < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) file_id;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5lImpJHDF5.c b/source/c/jhdf5/h5lImpJHDF5.c
new file mode 100644
index 0000000..45c8a57
--- /dev/null
+++ b/source/c/jhdf5/h5lImpJHDF5.c
@@ -0,0 +1,768 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Link (H5L) API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include "h5utilJHDF5.h"
+/* missing definitions from hdf5.h */
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE (!FALSE)
+#endif
+
+/* delete TRUE and FALSE when fixed in HDF5 */
+
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+extern int getMinorErrorNumber();
+
+#ifdef __cplusplus
+herr_t link_info_all(hid_t loc_id, const char *name, const H5L_info_t *link_info, void *opdata);
+herr_t H5Lget_link_info_all( JNIEnv *env, hid_t loc_id, char *group_name, char **names, int *type, char **linknames );
+herr_t link_names_all(hid_t loc_id, const char *name, const H5L_info_t *link_info, void *opdata);
+herr_t H5Lget_link_names_all( JNIEnv *env, hid_t loc_id, char *group_name, char **names );
+#else
+static herr_t link_info_all(hid_t loc_id, const char *name, const H5L_info_t *link_info, void *opdata);
+static herr_t H5Lget_link_info_all( JNIEnv *env, hid_t loc_id, char *group_name, char **names, int *type, char **linknames );
+static herr_t link_names_all(hid_t loc_id, const char *name, const H5L_info_t *link_info, void *opdata);
+static herr_t H5Lget_link_names_all( JNIEnv *env, hid_t loc_id, char *group_name, char **names );
+#endif
+
+typedef struct link_info_all
+{
+    JNIEnv *env;
+    char **name;
+    int *type;
+    char **linkname;
+    int count;
+} link_info_all_t;
+
+char *get_external_link(  JNIEnv *env, const char *linkval_buf, size_t size ) {
+      const char *filename;
+      const char *obj_path;
+      char *external_link_buf;
+      const char *prefix = "EXTERNAL::";
+      H5Lunpack_elink_val(linkval_buf, size, NULL, &filename, &obj_path);
+      external_link_buf = (char *) malloc(strlen(prefix) + strlen(filename) + strlen(obj_path) + 3);
+      if (external_link_buf == NULL)
+      {
+	        h5outOfMemory(env, "get_external_link: malloc failed");
+	        return NULL;
+      }
+      strcpy(external_link_buf, prefix);
+      strcat(external_link_buf, filename);
+      strcat(external_link_buf, "::");
+      strcat(external_link_buf, obj_path);
+      return external_link_buf;
+}
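+
+/*
+ *  Example: a link value that unpacks to the file name "data.h5" and the
+ *  object path "/group/dset" yields "EXTERNAL::data.h5::/group/dset".
+ */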
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lcreate_hard
+ * Signature: (ILjava/lang/String;ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lcreate_1hard
+  (JNIEnv *env, jclass clss, jint obj_loc_id, jstring
+    obj_name, jint link_loc_id, jstring link_name, jint lcpl_id, jint lapl_id)
+{
+    herr_t status;
+    char *oName, *lName;
+    jboolean isCopy;
+
+    if (obj_name == NULL) {
+        h5nullArgument( env, "H5Lcreate_hard:  obj_name is NULL");
+        return -1;
+    }
+    if (link_name == NULL) {
+        h5nullArgument( env, "H5Lcreate_hard:  link_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    oName = (char *)env->GetStringUTFChars(obj_name,&isCopy);
+#else
+    oName = (char *)(*env)->GetStringUTFChars(env,obj_name,&isCopy);
+#endif
+    if (oName == NULL) {
+        h5JNIFatalError( env, "H5Lcreate_hard:  obj_name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    lName = (char *)env->GetStringUTFChars(link_name,&isCopy);
+#else
+    lName = (char *)(*env)->GetStringUTFChars(env,link_name,&isCopy);
+#endif
+    if (lName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(obj_name,oName);
+#else
+        (*env)->ReleaseStringUTFChars(env,obj_name,oName);
+#endif
+        h5JNIFatalError( env, "H5Lcreate_hard:  link_name not pinned");
+        return -1;
+    }
+
+    status = H5Lcreate_hard((hid_t)obj_loc_id, oName, (hid_t)link_loc_id, lName, lcpl_id, lapl_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(link_name,lName);
+    env->ReleaseStringUTFChars(obj_name,oName);
+#else
+    (*env)->ReleaseStringUTFChars(env,link_name,lName);
+    (*env)->ReleaseStringUTFChars(env,obj_name,oName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lcreate_soft
+ * Signature: (Ljava/lang/String;ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lcreate_1soft
+  (JNIEnv *env, jclass clss, jstring target_path, 
+    jint link_loc_id, jstring link_name, jint lcpl_id, jint lapl_id)
+{
+    herr_t status;
+    char *tPath, *lName;
+    jboolean isCopy;
+
+    if (target_path == NULL) {
+        h5nullArgument( env, "H5Lcreate_soft:  target_path is NULL");
+        return -1;
+    }
+    if (link_name == NULL) {
+        h5nullArgument( env, "H5Lcreate_soft:  link_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    tPath = (char *)env->GetStringUTFChars(target_path,&isCopy);
+#else
+    tPath = (char *)(*env)->GetStringUTFChars(env,target_path,&isCopy);
+#endif
+    if (tPath == NULL) {
+        h5JNIFatalError( env, "H5Lcreate_soft:  target_path not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    lName = (char *)env->GetStringUTFChars(link_name,&isCopy);
+#else
+    lName = (char *)(*env)->GetStringUTFChars(env,link_name,&isCopy);
+#endif
+    if (lName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(target_path,tPath);
+#else
+        (*env)->ReleaseStringUTFChars(env,target_path,tPath);
+#endif
+        h5JNIFatalError( env, "H5Lcreate_soft:  link_name not pinned");
+        return -1;
+    }
+
+    status = H5Lcreate_soft(tPath, (hid_t)link_loc_id, lName, lcpl_id, lapl_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(link_name,lName);
+    env->ReleaseStringUTFChars(target_path,tPath);
+#else
+    (*env)->ReleaseStringUTFChars(env,link_name,lName);
+    (*env)->ReleaseStringUTFChars(env,target_path,tPath);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lcreate_external
+ * Signature: (Ljava/lang/String;Ljava/lang/String;ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lcreate_1external
+  (JNIEnv *env, jclass clss, jstring file_name, jstring
+    obj_name, jint link_loc_id, jstring link_name, jint lcpl_id, jint lapl_id)
+{
+    herr_t status;
+    char *fName, *oName, *lName;
+    jboolean isCopy;
+
+    if (file_name == NULL) {
+        h5nullArgument( env, "H5Lcreate_external:  file_name is NULL");
+        return -1;
+    }
+    if (obj_name == NULL) {
+        h5nullArgument( env, "H5Lcreate_external:  obj_name is NULL");
+        return -1;
+    }
+    if (link_name == NULL) {
+        h5nullArgument( env, "H5Lcreate_external:  link_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    fName = (char *)env->GetStringUTFChars(file_name,&isCopy);
+#else
+    fName = (char *)(*env)->GetStringUTFChars(env,file_name,&isCopy);
+#endif
+    if (fName == NULL) {
+        h5JNIFatalError( env, "H5Lcreate_external:  file_name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    oName = (char *)env->GetStringUTFChars(obj_name,&isCopy);
+#else
+    oName = (char *)(*env)->GetStringUTFChars(env,obj_name,&isCopy);
+#endif
+    if (oName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(file_name,fName);
+#else
+        (*env)->ReleaseStringUTFChars(env,file_name,fName);
+#endif
+        h5JNIFatalError( env, "H5Lcreate_external:  obj_name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    lName = (char *)env->GetStringUTFChars(link_name,&isCopy);
+#else
+    lName = (char *)(*env)->GetStringUTFChars(env,link_name,&isCopy);
+#endif
+    if (lName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(file_name,fName);
+        env->ReleaseStringUTFChars(obj_name,oName);
+#else
+        (*env)->ReleaseStringUTFChars(env,file_name,fName);
+        (*env)->ReleaseStringUTFChars(env,obj_name,oName);
+#endif
+        h5JNIFatalError( env, "H5Lcreate_external:  link_name not pinned");
+        return -1;
+    }
+
+    status = H5Lcreate_external(fName, oName, (hid_t)link_loc_id, lName, lcpl_id, lapl_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(file_name,fName);
+    env->ReleaseStringUTFChars(link_name,lName);
+    env->ReleaseStringUTFChars(obj_name,oName);
+#else
+    (*env)->ReleaseStringUTFChars(env,file_name,fName);
+    (*env)->ReleaseStringUTFChars(env,link_name,lName);
+    (*env)->ReleaseStringUTFChars(env,obj_name,oName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lmove
+ * Signature: (ILjava/lang/String;ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lmove
+  (JNIEnv *env, jclass clss, jint src_loc_id, jstring
+    src_name, jint dest_loc_id, jstring dest_name, jint lcpl_id, jint lapl_id)
+{
+    herr_t status;
+    char *srcName, *dstName;
+    jboolean isCopy;
+
+    if (src_name == NULL) {
+        h5nullArgument( env, "H5Lmove:  src_name is NULL");
+        return -1;
+    }
+    if (dest_name == NULL) {
+        h5nullArgument( env, "H5Lmove:  dest_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    srcName = (char *)env->GetStringUTFChars(src_name,&isCopy);
+#else
+    srcName = (char *)(*env)->GetStringUTFChars(env,src_name,&isCopy);
+#endif
+    if (srcName == NULL) {
+        h5JNIFatalError( env, "H5Lmove:  src_name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    dstName = (char *)env->GetStringUTFChars(dest_name,&isCopy);
+#else
+    dstName = (char *)(*env)->GetStringUTFChars(env,dest_name,&isCopy);
+#endif
+    if (dstName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(src_name,srcName);
+#else
+        (*env)->ReleaseStringUTFChars(env,src_name,srcName);
+#endif
+        h5JNIFatalError( env, "H5Lmove:  dest_name not pinned");
+        return -1;
+    }
+
+    status = H5Lmove((hid_t)src_loc_id, srcName, (hid_t)dest_loc_id, dstName, lcpl_id, lapl_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(dest_name,dstName);
+    env->ReleaseStringUTFChars(src_name,srcName);
+#else
+    (*env)->ReleaseStringUTFChars(env,dest_name,dstName);
+    (*env)->ReleaseStringUTFChars(env,src_name,srcName);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lget_link_info
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lget_1link_1info
+  (JNIEnv *env, jclass clss, jint loc_id, jstring object_name,
+    jobjectArray linkName, jboolean exception_when_non_existent)
+{
+    jint type;
+    herr_t status;
+    int minor_err_num;
+    char *oName;
+    char *linkval_buf;
+    char *linkname_buf;
+    jboolean isCopy;
+    jstring str;
+    H5L_info_t link_info;
+    H5O_info_t obj_info;
+
+    if (object_name == NULL) {
+        h5nullArgument( env, "H5Lget_link_info:  object_name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    oName = (char *)env->GetStringUTFChars(object_name,&isCopy);
+#else
+    oName = (char *)(*env)->GetStringUTFChars(env,object_name,&isCopy);
+#endif
+    if (oName == NULL) {
+        h5JNIFatalError( env, "H5Lget_link_info:  object_name not pinned");
+        return -1;
+    }
+
+    type = H5Lget_info( (hid_t) loc_id, oName, &link_info, H5P_DEFAULT );
+
+    if (type < 0) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(object_name,oName);
+#else
+        (*env)->ReleaseStringUTFChars(env,object_name,oName);
+#endif
+        if (exception_when_non_existent == JNI_FALSE)
+        {
+            minor_err_num = getMinorErrorNumber();
+            /*
+             * Note: H5E_CANTINSERT is thrown by the dense group lookup, see H5Gdense:534. That is
+             * probably a wrong error code, but we have to deal with it here anyway.
+             */
+            if (minor_err_num == H5E_NOTFOUND || minor_err_num == H5E_CANTINSERT)
+            {
+                return -1;
+            }
+        }
+        h5libraryError(env);
+    } else {
+        str = NULL;
+        if (link_info.type == H5L_TYPE_HARD)
+        {
+            status = H5Oget_info_by_name(loc_id, oName, &obj_info, H5P_DEFAULT);
+            (*env)->ReleaseStringUTFChars(env,object_name,oName);
+            if (status < 0)
+            {
+                h5libraryError(env);
+                return -1;
+            } else {
+                type = obj_info.type;
+            }
+        } else
+        {
+            type = H5O_TYPE_NTYPES + link_info.type;
+            if (linkName != NULL)
+            {
+                linkval_buf = (char*) malloc(link_info.u.val_size);
+                if (linkval_buf == NULL)
+                {
+                    (*env)->ReleaseStringUTFChars(env,object_name,oName);
+                    h5outOfMemory(env, "H5Lget_link_info: malloc failed");
+                    return -1;
+                }
+                if (H5Lget_val(loc_id, oName, linkval_buf, link_info.u.val_size, H5P_DEFAULT) < 0)
+                {
+                    (*env)->ReleaseStringUTFChars(env,object_name,oName);
+                    free(linkval_buf);
+                    h5libraryError(env);
+                    return -1;
+                }
+                if (link_info.type == H5L_TYPE_EXTERNAL)
+                {
+                    /* get_external_link returns a freshly malloc'ed name; the raw value is no longer needed. */
+                    linkname_buf = get_external_link(env, linkval_buf, link_info.u.val_size);
+                    free(linkval_buf);
+                } else
+                {
+                    linkname_buf = linkval_buf;
+                }
+                str = (*env)->NewStringUTF(env,linkname_buf);
+                /* NewStringUTF copies the characters, so the buffer can be freed here. */
+                free(linkname_buf);
+                (*env)->SetObjectArrayElement(env,linkName,0,(jobject)str);
+            }
+            (*env)->ReleaseStringUTFChars(env,object_name,oName);
+        }
+    }
+
+    return (jint)type;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lexists
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lexists
+  (JNIEnv *env, jclass clss, jint loc_id, jstring link_name)
+{
+    htri_t exists;
+    char *lName;
+    jboolean isCopy;
+
+    if (link_name == NULL) {
+        h5nullArgument( env, "H5Lexists:  link_name is NULL");
+        return JNI_FALSE;
+    }
+
+    lName = (char *)(*env)->GetStringUTFChars(env,link_name,&isCopy);
+    if (lName == NULL) {
+        h5JNIFatalError( env, "H5Lexists:  link_name not pinned");
+        return JNI_FALSE;
+    }
+
+    exists = H5Lexists( (hid_t) loc_id, lName, H5P_DEFAULT );
+    if (exists < 0)
+    {
+        if (getMinorErrorNumber() == H5E_NOTFOUND)
+        {
+            exists = 0;
+        } else
+        {
+            h5libraryError(env);
+        }
+    }
+
+    (*env)->ReleaseStringUTFChars(env,link_name,lName);
+    
+    return exists;
+}
+
+/*
+/////////////////////////////////////////////////////////////////////////////////
+//
+//
+// Add these methods so that we don't need to call H5Lget_info
+// in a loop to get information for all the object in a group, which takes
+// a lot of time to finish if the number of objects is more than 10,000
+//
+/////////////////////////////////////////////////////////////////////////////////
+*/
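+
+/*
+ * Illustrative sketch (not part of the library) of how a native caller might
+ * drive the batch lookup; the file id and the group path "/group" are
+ * hypothetical:
+ *
+ *   H5G_info_t ginfo;
+ *   H5Gget_info_by_name(file_id, "/group", &ginfo, H5P_DEFAULT);
+ *   char **names = malloc((size_t)ginfo.nlinks * sizeof(*names));
+ *   if (names != NULL
+ *       && H5Lget_link_names_all(env, file_id, "/group", names) >= 0)
+ *   {
+ *       hsize_t i;
+ *       // a single H5Literate_by_name pass filled names[0..nlinks-1]
+ *       for (i = 0; i < ginfo.nlinks; i++)
+ *           free(names[i]);
+ *   }
+ *   free(names);
+ */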
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lget_link_names_all
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lget_1link_1names_1all
+  (JNIEnv *env, jclass clss, jint loc_id, jstring group_name,
+    jobjectArray objName, jint n)
+{
+    herr_t status;
+    char *gName=NULL;
+    char **oName=NULL;
+    jboolean isCopy;
+    jstring str;
+    int i;
+
+    if (group_name == NULL) {
+        h5nullArgument( env, "H5Lget_link_names_all:  group_name is NULL");
+        return -1;
+    }
+
+    gName = (char *)(*env)->GetStringUTFChars(env,group_name,&isCopy);
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Lget_link_names_all:  group_name not pinned");
+        return -1;
+    }
+
+    oName = malloc(n * sizeof (*oName));
+    if (oName == NULL) {
+        (*env)->ReleaseStringUTFChars(env,group_name,gName);
+        h5outOfMemory(env, "H5Lget_link_names_all: malloc failed");
+        return -1;
+    }
+    for (i=0; i<n; i++) {
+        oName[i] = NULL;
+    } /* for (i=0; i<n; i++)*/
+    status = H5Lget_link_names_all(env, (hid_t) loc_id, gName,  oName);
+
+    (*env)->ReleaseStringUTFChars(env,group_name,gName);
+    if (status < 0) {
+        h5str_array_free_jhdf5(oName, n);
+        h5libraryError(env);
+    } else {
+        for (i=0; i<n; i++) {
+            if (*(oName+i)) {
+                str = (*env)->NewStringUTF(env,*(oName+i));
+                (*env)->SetObjectArrayElement(env,objName,i,(jobject)str);
+            }
+        } /* for (i=0; i<n; i++)*/
+        h5str_array_free_jhdf5(oName, n);
+    }
+
+    return (jint)status;
+
+}
+
+herr_t H5Lget_link_names_all( JNIEnv *env, hid_t loc_id, char *group_name, char **names )
+{
+    link_info_all_t info;
+    info.env = env;
+    info.name = names;
+    info.count = 0;
+
+    if(H5Literate_by_name(loc_id, group_name, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, link_names_all, (void *)&info, H5P_DEFAULT) < 0)
+        return -1;
+
+    return 0;
+}
+
+herr_t link_names_all(hid_t loc_id, const char *name, const H5L_info_t *link_info, void *opdata)
+{
+    link_info_all_t* info = (link_info_all_t*)opdata;
+
+    *(info->name+info->count) = (char *) malloc(strlen(name)+1);
+    if (*(info->name+info->count) == NULL)
+    {
+        h5outOfMemory(info->env, "H5Lget_link_names_all: malloc failed");
+        return -1;
+    }
+    strcpy(*(info->name+info->count), name);
+    
+    info->count++;
+
+    return 0;
+}
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Lget_link_info_all
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Lget_1link_1info_1all
+  (JNIEnv *env, jclass clss, jint loc_id, jstring group_name,
+    jobjectArray objName, jintArray oType, jobjectArray linkName, jint n)
+{
+    herr_t status;
+    char *gName=NULL;
+    char **oName=NULL;
+    char **lName=NULL;
+    jboolean isCopy;
+    jstring str;
+    jint *tarr;
+    int i;
+
+    if (group_name == NULL) {
+        h5nullArgument( env, "H5Lget_link_info_all:  group_name is NULL");
+        return -1;
+    }
+
+    if (oType == NULL) {
+        h5nullArgument( env, "H5Lget_link_info_all:  oType is NULL");
+        return -1;
+    }
+
+    gName = (char *)(*env)->GetStringUTFChars(env,group_name,&isCopy);
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Lget_link_info_all:  group_name not pinned");
+        return -1;
+    }
+
+    tarr = (*env)->GetIntArrayElements(env,oType,&isCopy);
+    if (tarr == NULL) {
+        (*env)->ReleaseStringUTFChars(env,group_name,gName);
+        h5JNIFatalError( env, "H5Lget_link_info_all:  type not pinned");
+        return -1;
+    }
+
+    oName = malloc(n * sizeof (*oName));
+    if (oName == NULL) {
+        (*env)->ReleaseStringUTFChars(env,group_name,gName);
+        (*env)->ReleaseIntArrayElements(env,oType,tarr,0);
+        h5outOfMemory(env, "H5Lget_link_info_all: malloc failed");
+        return -1;
+    }
+    for (i=0; i<n; i++) {
+        oName[i] = NULL;
+    } /* for (i=0; i<n; i++)*/
+    if (linkName != NULL)
+    {
+        lName = malloc(n * sizeof (*lName));
+        if (lName == NULL) {
+            (*env)->ReleaseStringUTFChars(env,group_name,gName);
+            (*env)->ReleaseIntArrayElements(env,oType,tarr,0);
+            h5str_array_free_jhdf5(oName, n);
+            h5outOfMemory(env, "H5Lget_link_info_all: malloc failed");
+            return -1;
+        }
+        for (i=0; i<n; i++) {
+            lName[i] = NULL;
+        } /* for (i=0; i<n; i++)*/
+    }
+    status = H5Lget_link_info_all( env, (hid_t) loc_id, gName,  oName, (int *)tarr, lName );
+
+    (*env)->ReleaseStringUTFChars(env,group_name,gName);
+    if (status < 0) {
+        (*env)->ReleaseIntArrayElements(env,oType,tarr,JNI_ABORT);
+        h5str_array_free_jhdf5(oName, n);
+        if (lName != NULL)
+        {
+            h5str_array_free_jhdf5(lName, n);
+        }
+        h5libraryError(env);
+    } else {
+        (*env)->ReleaseIntArrayElements(env,oType,tarr,0);
+
+        for (i=0; i<n; i++) {
+            if (*(oName+i)) {
+                str = (*env)->NewStringUTF(env,*(oName+i));
+                (*env)->SetObjectArrayElement(env,objName,i,(jobject)str);
+            }
+        } /* for (i=0; i<n; i++)*/
+        if (linkName != NULL)
+        {
+            for (i=0; i<n; i++) {
+                if (*(lName+i)) {
+                    str = (*env)->NewStringUTF(env,*(lName+i));
+                    (*env)->SetObjectArrayElement(env,linkName,i,(jobject)str);
+                }
+            } /* for (i=0; i<n; i++)*/
+            h5str_array_free_jhdf5(lName, n);
+        }
+        h5str_array_free_jhdf5(oName, n);
+    }
+
+    return (jint)status;
+
+}
+
+herr_t H5Lget_link_info_all( JNIEnv *env, hid_t loc_id, char *group_name, char **names, int *linktypes, char **linknames )
+{
+    link_info_all_t info;
+    info.env = env;
+    info.name = names;
+    info.type = linktypes;
+    info.linkname = linknames;
+    info.count = 0;
+
+    if(H5Literate_by_name(loc_id, group_name, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, link_info_all, (void *)&info, H5P_DEFAULT) < 0)
+        return -1;
+
+    return 0;
+}
+
+herr_t link_info_all(hid_t loc_id, const char *name, const H5L_info_t *link_info, void *opdata)
+{
+    link_info_all_t* info = (link_info_all_t*)opdata;
+    H5O_info_t obj_info;
+    char *linkval_buf;
+
+    *(info->name+info->count) = (char *) malloc(strlen(name)+1);
+    if (*(info->name+info->count) == NULL)
+    {
+        h5outOfMemory(info->env, "H5Lget_link_info_all: malloc failed");
+        return -1;
+    }
+    strcpy(*(info->name+info->count), name);
+
+    if (link_info->type == H5L_TYPE_HARD)
+    {
+        if (info->linkname != NULL)
+        {
+            *(info->linkname+info->count) = NULL;
+        }
+        if ( H5Oget_info_by_name(loc_id, name, &obj_info, H5P_DEFAULT) < 0 )
+        {
+            *(info->type+info->count) = H5O_TYPE_UNKNOWN;
+        } else {
+            *(info->type+info->count) = obj_info.type;
+        }
+    } else
+    {
+        *(info->type+info->count) = H5O_TYPE_NTYPES + link_info->type;
+        if (info->linkname != NULL)
+        {
+            linkval_buf = (char*) malloc(link_info->u.val_size);
+            if (linkval_buf == NULL)
+            {
+                h5outOfMemory(info->env, "H5Lget_link_info_all: malloc failed");
+                return -1;
+            }
+            if (H5Lget_val(loc_id, name, linkval_buf, link_info->u.val_size, H5P_DEFAULT) < 0)
+            {
+                h5libraryError(info->env);
+                free(linkval_buf);
+                return -1;
+            }
+            if (link_info->type == H5L_TYPE_EXTERNAL)
+            {
+                *(info->linkname+info->count) = get_external_link( info->env, linkval_buf, link_info->u.val_size );
+                free(linkval_buf);
+            } else
+            {
+                *(info->linkname+info->count) = linkval_buf;
+            }
+        }
+    }
+    info->count++;
+
+    return 0;
+}
diff --git a/source/c/jhdf5/h5oImpJHDF5.c b/source/c/jhdf5/h5oImpJHDF5.c
new file mode 100644
index 0000000..81b180d
--- /dev/null
+++ b/source/c/jhdf5/h5oImpJHDF5.c
@@ -0,0 +1,262 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Group Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
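+
+/*
+ * A minimal sketch of the wrapping pattern used by the routines below
+ * (illustrative only; "H5Xfoo" is a hypothetical entry point):
+ *
+ *   JNIEXPORT jint JNICALL Java_..._H5Xfoo(JNIEnv *env, jclass clss, jstring name)
+ *   {
+ *       jboolean isCopy;
+ *       char *cName = (char *)(*env)->GetStringUTFChars(env, name, &isCopy);
+ *       if (cName == NULL) {
+ *           h5JNIFatalError(env, "H5Xfoo:  name not pinned");
+ *           return -1;
+ *       }
+ *       herr_t status = H5Xfoo(cName);       // the wrapped entry point
+ *       (*env)->ReleaseStringUTFChars(env, name, cName);
+ *       if (status < 0) {
+ *           h5libraryError(env);
+ *       }
+ *       return (jint)status;
+ *   }
+ */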
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+/* missing definitions from hdf5.h */
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE (!FALSE)
+#endif
+
+/* delete TRUE and FALSE when fixed in HDF5 */
+
+#include <jni.h>
+#include <stdlib.h>
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Oopen
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Oopen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist_id)
+{
+    herr_t status;
+    char* gName;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Oopen:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    gName = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    gName = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+
+    if (gName == NULL) {
+        h5JNIFatalError( env, "H5Oopen:  file name not pinned");
+        return -1;
+    }
+
+    status = H5Oopen((hid_t)loc_id, gName, (hid_t) access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,gName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,gName);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Oclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Oclose
+  (JNIEnv *env, jclass clss, jint group_id)
+{
+    herr_t retVal = 0;
+
+    if (group_id > 0)
+        retVal = H5Oclose((hid_t)group_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Ocopy
+ * Signature: (ILjava/lang/String;ILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Ocopy
+  (JNIEnv *env, jclass clss, jint src_loc_id, jstring src_name, jint dst_loc_id, jstring dst_name, 
+     jint object_copy_plist, jint link_creation_plist)
+{
+    herr_t status;
+    char *srcName, *dstName;
+    jboolean isCopy;
+
+    if (src_name == NULL) {
+        h5nullArgument( env, "H5Ocopy:  src_name is NULL");
+        return -1;
+    }
+
+    if (dst_name == NULL) {
+        h5nullArgument( env, "H5Ocopy:  dst_name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    srcName = (char *)env->GetStringUTFChars(src_name,&isCopy);
+#else
+    srcName = (char *)(*env)->GetStringUTFChars(env,src_name,&isCopy);
+#endif
+
+    if (srcName == NULL) {
+        h5JNIFatalError( env, "H5Ocopy:  source object name not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    dstName = (char *)env->GetStringUTFChars(dst_name,&isCopy);
+#else
+    dstName = (char *)(*env)->GetStringUTFChars(env,dst_name,&isCopy);
+#endif
+
+    if (dstName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(src_name,srcName);
+#else
+        (*env)->ReleaseStringUTFChars(env,src_name,srcName);
+#endif
+        h5JNIFatalError( env, "H5Ocopy:  destination object name not pinned");
+        return -1;
+    }
+
+    status = H5Ocopy((hid_t)src_loc_id, srcName, (hid_t)dst_loc_id, dstName, 
+                        (hid_t)object_copy_plist, (hid_t)link_creation_plist);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(src_name,srcName);
+    env->ReleaseStringUTFChars(dst_name,dstName);
+#else
+    (*env)->ReleaseStringUTFChars(env,src_name,srcName);
+    (*env)->ReleaseStringUTFChars(env,dst_name,dstName);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Oget_info_by_name
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Oget_1info_1by_1name
+  (JNIEnv *env, jclass clss, jint loc_id, jstring object_name,
+    jlongArray info, jboolean exception_when_non_existent)
+{
+    jint type;
+    herr_t status;
+    jlong *infoP, *infoPP;
+    jint info_len;
+    int minor_err_num;
+    char *oName;
+    jboolean isCopy;
+    H5O_info_t obj_info;
+
+    if (object_name == NULL) {
+        h5nullArgument( env, "H5Oget_info_by_name:  object_name is NULL");
+        return -1;
+    }
+    if (info != NULL)
+    {
+#ifdef __cplusplus
+        info_len = env->GetArrayLength(info);
+#else
+        info_len = (*env)->GetArrayLength(env,info);
+#endif
+        if (info_len != 5)
+        {
+            h5badArgument( env, "H5Oget_info_by_name:  info is not an array of length 5");
+            return -1;
+        }
+    }
+
+#ifdef __cplusplus
+    oName = (char *)env->GetStringUTFChars(object_name,&isCopy);
+#else
+    oName = (char *)(*env)->GetStringUTFChars(env,object_name,&isCopy);
+#endif
+    if (oName == NULL) {
+        h5JNIFatalError( env, "H5Oget_info_by_name:  object_name not pinned");
+        return -1;
+    }
+
+    status = H5Oget_info_by_name(loc_id, oName, &obj_info, H5P_DEFAULT);
+    (*env)->ReleaseStringUTFChars(env,object_name,oName);
+    if (status < 0)
+    {
+        if (exception_when_non_existent == JNI_FALSE)
+        {
+            minor_err_num = getMinorErrorNumber();
+            /*
+             * Note: H5E_CANTINSERT is thrown by the dense group lookup, see H5Gdense:534. That is
+             * probably a wrong error code, but we have to deal with it here anyway.
+             */
+            if (minor_err_num == H5E_NOTFOUND || minor_err_num == H5E_CANTINSERT)
+            {
+                return -1;
+            }
+        }
+        h5libraryError(env);
+        return -1;
+    } else {
+        type = obj_info.type;
+        if (info != NULL)
+        {
+#ifdef __cplusplus
+            infoP = (jlong *)env->GetPrimitiveArrayCritical(info,&isCopy);
+#else
+            infoP = (jlong *)(*env)->GetPrimitiveArrayCritical(env,info,&isCopy);
+#endif
+            if (infoP == NULL) {
+                h5JNIFatalError( env, "H5Oget_info_by_name:  info not pinned");
+                return -1;
+            }
+            /* Pack the five jlong slots: fileno, addr, rc, ctime, num_attrs. */
+            infoPP = infoP;
+            *infoPP++ = obj_info.fileno;
+            *infoPP++ = obj_info.addr;
+            *infoPP++ = obj_info.rc;
+            *infoPP++ = obj_info.ctime;
+            *infoPP++ = obj_info.num_attrs;
+#ifdef __cplusplus
+            env->ReleasePrimitiveArrayCritical(info,infoP,0);
+#else
+            (*env)->ReleasePrimitiveArrayCritical(env,info,infoP,0);
+#endif
+        }
+    }
+
+    return (jint) type;
+
+}
+
diff --git a/source/c/jhdf5/h5pImpJHDF5.c b/source/c/jhdf5/h5pImpJHDF5.c
new file mode 100755
index 0000000..ba743ca
--- /dev/null
+++ b/source/c/jhdf5/h5pImpJHDF5.c
@@ -0,0 +1,3528 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Property List API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
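+
+/*
+ * Typical property-list lifecycle that these wrappers expose to Java
+ * (a native-side sketch; the chunk rank and sizes are hypothetical):
+ *
+ *   hsize_t chunk[2] = { 64, 64 };
+ *   hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ *   H5Pset_layout(dcpl, H5D_CHUNKED);
+ *   H5Pset_chunk(dcpl, 2, chunk);
+ *   ... pass dcpl to H5Dcreate, then ...
+ *   H5Pclose(dcpl);
+ */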
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "hdf5.h"
+/* missing definitions from hdf5.h */
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#ifndef TRUE
+#define TRUE (!FALSE)
+#endif
+
+/* delete TRUE and FALSE when fixed in HDF5 */
+
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+/* Declared here because the overflow checks in the H5Pget_* wrappers below use it. */
+extern jboolean h5raiseException( JNIEnv *env, char *exception, char *message);
+extern jboolean h5libraryError( JNIEnv *env );
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcreate
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pcreate
+  (JNIEnv *env, jclass clss, jint type)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pcreate((hid_t)type );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pclose
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = 0;
+
+    if (plist > 0)
+        retVal = H5Pclose((hid_t)plist);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_class
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1class
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    hid_t retVal = H5P_NO_CLASS;
+    retVal =  H5Pget_class((hid_t) plist );
+    if (retVal == H5P_NO_CLASS) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pcopy
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    hid_t retVal = -1;
+    retVal =  H5Pcopy((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_version
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1version
+  (JNIEnv *env, jclass clss, jint plist, jintArray version_info)
+{
+    herr_t status;
+    jint *theArray;
+    jboolean isCopy;
+
+    if (version_info == NULL) {
+        h5nullArgument( env, "H5Pget_version:  version_info input array is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(version_info) < 4) {
+        h5badArgument( env, "H5Pget_version:  version_info input array < 4");
+        return -1;
+    }
+
+    theArray = (jint *)env->GetIntArrayElements(version_info,&isCopy);
+#else
+    if ((*env)->GetArrayLength(env, version_info) < 4) {
+        h5badArgument( env, "H5Pget_version:  version_info input array < 4");
+        return -1;
+    }
+
+    theArray = (jint *)(*env)->GetIntArrayElements(env,version_info,&isCopy);
+#endif
+
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_version:  version_info not pinned");
+        return -1;
+    }
+
+    status = H5Pget_version((hid_t)plist, (unsigned *) &(theArray[0]),
+        (unsigned *) &(theArray[1]), (unsigned *) &(theArray[2]),
+        (unsigned *) &(theArray[3]));
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(version_info,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,version_info,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        if (theArray[0] < 0 || theArray[1] < 0 || theArray[2] < 0 || theArray[3] < 0)
+        {
+            /* Release the pinned array before raising, so the pin is not leaked. */
+#ifdef __cplusplus
+            env->ReleaseIntArrayElements(version_info,theArray,JNI_ABORT);
+#else
+            (*env)->ReleaseIntArrayElements(env,version_info,theArray,JNI_ABORT);
+#endif
+            h5raiseException( env, "java/lang/RuntimeException",
+                              "H5Pget_version:  parameter overflow");
+            return -1;
+        }
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(version_info,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,version_info,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_userblock
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1userblock
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    long sz;
+    herr_t retVal = -1;
+    sz = (long)size;
+    retVal =  H5Pset_userblock((hid_t)plist, (hsize_t)sz );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_userblock
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1userblock
+  (JNIEnv *env, jclass clss, jint plist, jlongArray size)
+{
+    herr_t status;
+    jlong *theArray;
+    jboolean isCopy;
+    hsize_t s;
+
+    if (size == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Pget_userblock:  size is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jlong *)env->GetLongArrayElements(size,&isCopy);
+#else
+    theArray = (jlong *)(*env)->GetLongArrayElements(env,size,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_userblock:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_userblock((hid_t)plist, &s);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(size,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,size,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = s;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(size,theArray,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,size,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_sizes
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1sizes
+  (JNIEnv *env, jclass clss, jint plist, jint sizeof_addr, jint sizeof_size)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_sizes((hid_t)plist, (size_t)sizeof_addr, (size_t)sizeof_size);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_sizes
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1sizes
+  (JNIEnv *env, jclass clss, jint plist, jintArray size)
+{
+    herr_t status;
+    jint *theArray;
+    jboolean isCopy;
+    size_t ss;
+    size_t sa;
+
+    if (size == NULL) {
+        h5nullArgument( env, "H5Pget_sizes:  size is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(size) < 2) {
+        h5badArgument( env, "H5Pget_sizes:  size input array < 2 elements");
+        return -1;
+    }
+    theArray = (jint *)env->GetIntArrayElements(size,&isCopy);
+#else
+    if ((*env)->GetArrayLength(env, size) < 2) {
+        h5badArgument( env, "H5Pget_sizes:  size input array < 2 elements");
+        return -1;
+    }
+    theArray = (jint *)(*env)->GetIntArrayElements(env,size,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_sizes:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_sizes((hid_t)plist, &sa, &ss);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(size,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,size,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = sa;
+        theArray[1] = ss;
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(size,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,size,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_sym_k
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1sym_1k
+  (JNIEnv *env, jclass clss, jint plist, jint ik, jint lk)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_sym_k((hid_t)plist, (int)ik, (int)lk);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_sym_k
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1sym_1k
+  (JNIEnv *env, jclass clss, jint plist, jintArray size)
+{
+    herr_t status;
+    jint *theArray;
+    jboolean isCopy;
+
+    if (size == NULL) {
+        h5nullArgument( env, "H5Pget_sym_k:  size is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(size) < 2) {
+        h5badArgument( env, "H5Pget_sym_k:  size < 2 elements");
+        return -1;
+    }
+    theArray = (jint *)env->GetIntArrayElements(size,&isCopy);
+#else
+    if ((*env)->GetArrayLength(env, size) < 2) {
+        h5badArgument( env, "H5Pget_sym_k:  size < 2 elements");
+        return -1;
+    }
+    theArray = (jint *)(*env)->GetIntArrayElements(env,size,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_sym_k:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_sym_k((hid_t)plist, (unsigned *) &(theArray[0]), (unsigned *) &(theArray[1]));
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(size,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,size,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+        /* Check for overflow while the array is still pinned; the buffer must
+           not be read after ReleaseIntArrayElements. */
+        if (theArray[0] < 0 || theArray[1] < 0)
+        {
+#ifdef __cplusplus
+            env->ReleaseIntArrayElements(size,theArray,JNI_ABORT);
+#else
+            (*env)->ReleaseIntArrayElements(env,size,theArray,JNI_ABORT);
+#endif
+            h5raiseException( env, "java/lang/RuntimeException",
+                              "H5Pget_sym_k:  parameter overflow");
+            return -1;
+        }
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(size,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,size,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_istore_k
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1istore_1k
+  (JNIEnv *env, jclass clss, jint plist, jint ik)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_istore_k((hid_t)plist, (int)ik );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_istore_k
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1istore_1k
+  (JNIEnv *env, jclass clss, jint plist, jintArray ik)
+{
+    herr_t status;
+    jint *theArray;
+    jboolean isCopy;
+
+    if (ik == NULL) {
+        h5nullArgument( env, "H5Pget_istore_k:  ik is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jint *)env->GetIntArrayElements(ik,&isCopy);
+#else
+    theArray = (jint *)(*env)->GetIntArrayElements(env,ik,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_istore_k:  ik not pinned");
+        return -1;
+    }
+
+    status = H5Pget_istore_k((hid_t)plist, (unsigned *) &(theArray[0]));
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(ik,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,ik,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+        /* Check for overflow while the array is still pinned; reading it
+           after ReleaseIntArrayElements is undefined. */
+        if (theArray[0] < 0)
+        {
+#ifdef __cplusplus
+            env->ReleaseIntArrayElements(ik,theArray,JNI_ABORT);
+#else
+            (*env)->ReleaseIntArrayElements(env,ik,theArray,JNI_ABORT);
+#endif
+            h5raiseException( env, "java/lang/RuntimeException",
+                              "H5Pget_istore_k:  parameter overflow");
+            return -1;
+        }
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(ik,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,ik,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_layout
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1layout
+  (JNIEnv *env, jclass clss, jint plist, jint layout)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_layout((hid_t)plist, (H5D_layout_t)layout );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_layout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1layout
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    H5D_layout_t retVal = H5D_LAYOUT_ERROR;
+    retVal =  H5Pget_layout((hid_t)plist);
+    if (retVal == H5D_LAYOUT_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_libver_bounds
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1libver_1bounds
+  (JNIEnv *env, jclass clss, jint plist, jint low, jint high)
+{
+    herr_t retVal;
+
+    retVal =  H5Pset_libver_bounds((hid_t)plist, (H5F_libver_t)low,
+        (H5F_libver_t)high);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_libver_bounds
+ * Signature: ([I)I
+ */
+JNIEXPORT jintArray JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1libver_1bounds
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal;
+    jintArray iarray;
+    jint bounds[2];
+
+    retVal =  H5Pget_libver_bounds((hid_t)plist, (H5F_libver_t*)bounds, (H5F_libver_t*)(bounds+1));
+    if (retVal < 0) {
+        h5libraryError(env);
+        return NULL;
+    }
+#ifdef __cplusplus
+    iarray = env->NewIntArray(2);
+#else
+    iarray = (*env)->NewIntArray(env,2);
+#endif
+    if (iarray == NULL) {
+        h5outOfMemory( env,  "H5Pget_libver_bounds" );
+        return NULL;
+    }
+#ifdef __cplusplus
+    env->SetIntArrayRegion(iarray,0,2,bounds);
+#else
+    (*env)->SetIntArrayRegion(env,iarray,0,2,bounds);
+#endif
+    
+    return iarray;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_chunk
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1chunk
+  (JNIEnv *env, jclass clss, jint plist, jint ndims, jbyteArray dim)
+{
+    herr_t status;
+    jbyte *theArray;
+    jboolean isCopy;
+    hsize_t *da;
+    int i;
+    hsize_t *lp;
+    jlong *jlp;
+    int rank;
+
+    if (dim == NULL) {
+        h5nullArgument( env, "H5Pset_chunk:  dim array is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    i = env->GetArrayLength(dim);
+#else
+    i = (*env)->GetArrayLength(env, dim);
+#endif
+    rank = i / sizeof(jlong);
+    if (rank < ndims) {
+        h5badArgument( env, "H5Pset_chunk:  dims array < ndims");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jbyte *)env->GetByteArrayElements(dim,&isCopy);
+#else
+    theArray = (jbyte *)(*env)->GetByteArrayElements(env,dim,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pset_chunk:  dim array not pinned");
+        return -1;
+    }
+    da = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (da == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(dim,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,dim,theArray,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Pset_chunk:  dims not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)theArray;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+
+    status = H5Pset_chunk((hid_t)plist, (int)ndims, da);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(dim,theArray,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,dim,theArray,JNI_ABORT);
+#endif
+    free(da);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_chunk
+ * Signature: (II[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1chunk
+  (JNIEnv *env, jclass clss, jint plist, jint max_ndims, jlongArray dims)
+{
+    herr_t status;
+    jlong *theArray;
+    jboolean isCopy;
+    hsize_t *da;
+    int i;
+
+    if (dims == NULL) {
+        h5nullArgument( env, "H5Pget_chunk:  dims is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(dims) < max_ndims) {
+        h5badArgument( env, "H5Pget_chunk:  dims array < max_ndims");
+        return -1;
+    }
+    theArray = (jlong *)env->GetLongArrayElements(dims,&isCopy);
+#else
+    if ((*env)->GetArrayLength(env, dims) < max_ndims) {
+        h5badArgument( env, "H5Pget_chunk:  dims array < max_ndims");
+        return -1;
+    }
+    theArray = (jlong *)(*env)->GetLongArrayElements(env,dims,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_chunk:  input dims not pinned");
+        return -1;
+    }
+    da = (hsize_t *)malloc( max_ndims * sizeof(hsize_t));
+    if (da == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(dims, theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,dims, theArray,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Pget_chunk:  dims not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Pget_chunk((hid_t)plist, (int)max_ndims, da);
+
+    if (status < 0)  {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(dims, theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,dims, theArray,JNI_ABORT);
+#endif
+        free (da);
+        h5libraryError(env);
+    } else {
+        for (i= 0; i < max_ndims; i++) {
+            theArray[i] = da[i];
+        }
+        free (da);
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(dims, theArray,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,dims, theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_alignment
+ * Signature: (IJJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1alignment
+  (JNIEnv *env, jclass clss, jint plist, jlong threshold, jlong alignment)
+{
+    long thr;
+    long align;
+    herr_t retVal = -1;
+    thr = (long)threshold;
+    align = (long)alignment;
+    retVal =  H5Pset_alignment((hid_t)plist, (hsize_t)thr, (hsize_t)align);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_alignment
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1alignment
+  (JNIEnv *env, jclass clss, jint plist, jlongArray alignment)
+{
+    herr_t status;
+    jlong *theArray;
+    jboolean isCopy;
+    hsize_t t;
+    hsize_t a;
+
+    if (alignment == NULL) {
+        h5nullArgument( env, "H5Pget_alignment:  input alignment is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(alignment) < 2) {
+        h5badArgument( env, "H5Pget_alignment:  alignment input array < 2");
+        return -1;
+    }
+    theArray = (jlong *)env->GetLongArrayElements(alignment,&isCopy);
+#else
+    if ((*env)->GetArrayLength(env, alignment) < 2) {
+        h5badArgument( env, "H5Pget_alignment:  alignment input array < 2");
+        return -1;
+    }
+    theArray = (jlong *)(*env)->GetLongArrayElements(env,alignment,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_alignment:  input array not pinned");
+        return -1;
+    }
+
+    status = H5Pget_alignment((hid_t)plist, &t, &a);
+
+    if (status < 0)  {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(alignment, theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,alignment, theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = t;
+        theArray[1] = a;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(alignment, theArray,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,alignment, theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_external
+ * Signature: (ILjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1external
+  (JNIEnv *env, jclass clss, jint plist, jstring name, jlong offset, jlong size)
+{
+    herr_t status;
+    char* file;
+    jboolean isCopy;
+    long off;
+    long sz;
+
+    off = (long)offset;
+    sz = (long)size;
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pset_external:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    file = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    file = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (file == NULL) {
+        h5JNIFatalError( env, "H5Pset_external:  name not pinned");
+        return -1;
+    }
+
+    status = H5Pset_external((hid_t)plist, file, (off_t)off, (hsize_t)sz);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,file);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,file);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_external_count
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1external_1count
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    int retVal = -1;
+    retVal =  H5Pget_external_count((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_external
+ * Signature: (III[Ljava/lang/String;[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1external
+  (JNIEnv *env, jclass clss, jint plist, jint idx, jint name_size,
+  jobjectArray name, jlongArray size)
+{
+    herr_t status;
+    jlong *theArray;
+    jboolean isCopy;
+    char *file;
+    jstring str;
+    off_t o;
+    hsize_t s;
+
+    if (name_size < 0) {
+        h5badArgument( env, "H5Pget_external:  name_size < 0");
+        return -1;
+    }
+    else if (name_size == 0) {
+        file = NULL;
+    }
+    else {
+        file = (char *)malloc(sizeof(char)*name_size);
+        if (file == NULL) {
+            h5outOfMemory( env, "H5Pget_external:  name buffer malloc failed");
+            return -1;
+        }
+    }
+
+    if (size != NULL) {
+#ifdef __cplusplus
+        if (env->GetArrayLength(size) < 2) {
+            free(file);
+            h5badArgument( env, "H5Pget_external:  size input array < 2");
+            return -1;
+        }
+        theArray = (jlong *)env->GetLongArrayElements(size,&isCopy);
+#else
+        if ((*env)->GetArrayLength(env, size) < 2) {
+            free(file);
+            h5badArgument( env, "H5Pget_external:  size input array < 2");
+            return -1;
+        }
+        theArray = (jlong *)(*env)->GetLongArrayElements(env,size,&isCopy);
+#endif
+        if (theArray == NULL) {
+            free(file);
+            h5JNIFatalError( env, "H5Pget_external:  size array not pinned");
+            return -1;
+        }
+    }
+
+    status = H5Pget_external((hid_t) plist, (int)idx, (size_t)name_size,
+            file, (off_t *)&o, (hsize_t *)&s);
+
+
+    if (status < 0) {
+        if (size != NULL) {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(size,theArray,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,size,theArray,JNI_ABORT);
+#endif
+        }
+        free(file);
+        h5libraryError(env);
+        return -1;
+    }
+
+    if (size != NULL) {
+        theArray[0] = o;
+        theArray[1] = s;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(size,theArray,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,size,theArray,0);
+#endif
+    }
+
+    if (file != NULL) {
+        /*  NewStringUTF may throw OutOfMemoryError */
+#ifdef __cplusplus
+        str = env->NewStringUTF(file);
+#else
+        str = (*env)->NewStringUTF(env,file);
+#endif
+        if (str == NULL) {
+            free(file);
+            h5outOfMemory( env, "H5Pget_external:  return array not created");
+            return -1;
+        }
+        /*  SetObjectArrayElement may raise exceptions */
+#ifdef __cplusplus
+        env->SetObjectArrayElement(name,0,(jobject)str);
+#else
+        (*env)->SetObjectArrayElement(env,name,0,(jobject)str);
+#endif
+        free(file);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fill_value
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fill_1value
+  (JNIEnv *env, jclass clss, jint plist_id, jint type_id, jbyteArray value)
+{
+    jint status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+#ifdef __cplusplus
+    byteP = env->GetByteArrayElements(value,&isCopy);
+#else
+    byteP = (*env)->GetByteArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Pset_fill_value:  value not pinned");
+        return -1;
+    }
+    status = H5Pset_fill_value((hid_t)plist_id, (hid_t)type_id, byteP);
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(value,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,value,byteP,JNI_ABORT);
+#endif
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fill_value
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1fill_1value
+  (JNIEnv *env, jclass clss, jint plist_id, jint type_id, jbyteArray value)
+{
+    jint status;
+    jbyte *byteP;
+    jboolean isCopy;
+
+#ifdef __cplusplus
+    byteP = env->GetByteArrayElements(value,&isCopy);
+#else
+    byteP = (*env)->GetByteArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Pget_fill_value:  value not pinned");
+        return -1;
+    }
+    status = H5Pget_fill_value((hid_t)plist_id, (hid_t)type_id, byteP);
+
+#ifdef __cplusplus
+    if (status < 0)
+        env->ReleaseByteArrayElements(value,byteP,JNI_ABORT);
+    else
+        env->ReleaseByteArrayElements(value,byteP,0);
+#else
+    if (status < 0)
+        (*env)->ReleaseByteArrayElements(env,value,byteP,JNI_ABORT);
+    else
+        (*env)->ReleaseByteArrayElements(env,value,byteP,0);
+#endif
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_filter
+ * Signature: (IIII[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1filter
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jint flags,
+  jint cd_nelmts, jintArray cd_values)
+{
+    herr_t status;
+    jint *theArray;
+    jboolean isCopy;
+
+    if (cd_values == NULL)
+        status = H5Pset_filter((hid_t)plist, (H5Z_filter_t)filter,
+            (unsigned int)flags, (size_t)cd_nelmts, NULL);
+    else
+    {
+#ifdef __cplusplus
+        theArray = (jint *)env->GetIntArrayElements(cd_values,&isCopy);
+#else
+        theArray = (jint *)(*env)->GetIntArrayElements(env,cd_values,&isCopy);
+#endif
+        if (theArray == NULL) {
+            h5JNIFatalError(env,  "H5Pset_filter:  input array not pinned");
+            return -1;
+        }
+        status = H5Pset_filter((hid_t)plist, (H5Z_filter_t)filter,
+            (unsigned int)flags, (size_t)cd_nelmts, (const unsigned int *)theArray);
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(cd_values,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,cd_values,theArray,JNI_ABORT);
+#endif
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_nfilters
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1nfilters
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    int retVal = -1;
+    retVal =  H5Pget_nfilters((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_filter
+ * Signature: (II[I[IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1filter
+  (JNIEnv *env, jclass clss, jint plist, jint filter_number, jintArray flags,
+  jintArray cd_nelmts, jintArray cd_values, jint namelen, jobjectArray name)
+{
+    herr_t status;
+    jint *flagsArray, *cd_nelmtsArray, *cd_valuesArray;
+    jboolean isCopy;
+    char *filter;
+    jstring str;
+
+    if (namelen <= 0) {
+        h5badArgument( env, "H5Pget_filter:  namelen <= 0");
+        return -1;
+    }
+    if (flags == NULL) {
+        h5badArgument( env, "H5Pget_filter:  flags is NULL");
+        return -1;
+    }
+    if (cd_nelmts == NULL) {
+        h5badArgument( env, "H5Pget_filter:  cd_nelmts is NULL");
+        return -1;
+    }
+    if (cd_values == NULL) {
+        h5badArgument( env, "H5Pget_filter:  cd_values is NULL");
+        return -1;
+    }
+    filter = (char *)malloc(sizeof(char)*namelen);
+    if (filter == NULL) {
+        h5outOfMemory( env, "H5Pget_filter:  namelent malloc failed");
+        return -1;
+    }
+#ifdef __cplusplus
+    flagsArray = (jint *)env->GetIntArrayElements(flags,&isCopy);
+#else
+    flagsArray = (jint *)(*env)->GetIntArrayElements(env,flags,&isCopy);
+#endif
+    if (flagsArray == NULL) {
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  flags array not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    cd_nelmtsArray = (jint *)env->GetIntArrayElements(cd_nelmts,&isCopy);
+#else
+    cd_nelmtsArray = (jint *)(*env)->GetIntArrayElements(env,cd_nelmts,&isCopy);
+#endif
+    if (cd_nelmtsArray == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(flags,flagsArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,flags,flagsArray,JNI_ABORT);
+#endif
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  nelmts array not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    cd_valuesArray = (jint *)env->GetIntArrayElements(cd_values,&isCopy);
+#else
+    cd_valuesArray = (jint *)(*env)->GetIntArrayElements(env,cd_values,&isCopy);
+#endif
+    if (cd_valuesArray == NULL)  {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(cd_nelmts,cd_nelmtsArray,JNI_ABORT);
+        env->ReleaseIntArrayElements(flags,flagsArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,cd_nelmts,cd_nelmtsArray,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,flags,flagsArray,JNI_ABORT);
+#endif
+        free(filter);
+        h5JNIFatalError(env,  "H5Pget_filter:  elmts array not pinned");
+        return -1;
+    }
+
+    status = H5Pget_filter((hid_t)plist, (int)filter_number, (unsigned int *)flagsArray,
+          (size_t *)cd_nelmtsArray, (unsigned int *)cd_valuesArray, (size_t)namelen, filter, (unsigned int*)NULL);
+
+    if (status < 0)
+    {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(cd_values,cd_valuesArray,JNI_ABORT);
+        env->ReleaseIntArrayElements(cd_nelmts,cd_nelmtsArray,JNI_ABORT);
+        env->ReleaseIntArrayElements(flags,flagsArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,cd_values,cd_valuesArray,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,cd_nelmts,cd_nelmtsArray,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,flags,flagsArray,JNI_ABORT);
+#endif
+        free(filter);
+        h5libraryError(env);
+    }
+    else
+    {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(cd_values,cd_valuesArray,0);
+        env->ReleaseIntArrayElements(cd_nelmts,cd_nelmtsArray,0);
+        env->ReleaseIntArrayElements(flags,flagsArray,0);
+        /*  NewStringUTF may throw OutOfMemoryError */
+        str = env->NewStringUTF(filter);
+#else
+        (*env)->ReleaseIntArrayElements(env,cd_values,cd_valuesArray,0);
+        (*env)->ReleaseIntArrayElements(env,cd_nelmts,cd_nelmtsArray,0);
+        (*env)->ReleaseIntArrayElements(env,flags,flagsArray,0);
+        /*  NewStringUTF may throw OutOfMemoryError */
+        str = (*env)->NewStringUTF(env,filter);
+#endif
+        if (str == NULL) {
+            free(filter);
+            h5JNIFatalError(env,  "H5Pget_filter:  return string not pinned");
+            return -1;
+        }
+        free(filter);
+        /*  SetObjectArrayElement may throw exceptions */
+#ifdef __cplusplus
+        env->SetObjectArrayElement(name,0,(jobject)str);
+#else
+        (*env)->SetObjectArrayElement(env,name,0,(jobject)str);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_driver
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1driver
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    hid_t retVal =  -1;
+    retVal =  H5Pget_driver((hid_t) plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+#ifdef removed
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_stdio
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1stdio
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_stdio((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_stdio
+ * Signature: (I)B
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1stdio
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retValue;
+    retValue = H5Pget_stdio((hid_t)plist);
+
+    if (retValue >= 0) {
+        return JNI_TRUE;
+    } else {
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_sec2
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1sec2
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_sec2((hid_t) plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_sec2
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1sec2
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retValue;
+
+    retValue =  H5Pget_sec2((hid_t)plist);
+
+    if (retValue >= 0) {
+        return JNI_TRUE;
+    } else {
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_core
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1core
+  (JNIEnv *env, jclass clss, jint plist, jint increment)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_core((hid_t)plist, (size_t)increment);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_core
+ * Signature: (I[I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1core
+  (JNIEnv *env, jclass clss, jint plist, jintArray increment)
+{
+    jboolean isCopy;
+    herr_t status;
+    jint *theArray = NULL;
+
+    if (increment != NULL) {
+        theArray = (jint *)(*env)->GetIntArrayElements(env,increment,&isCopy);
+        if (theArray == NULL) {
+            h5JNIFatalError(env,  "H5Pget_core:  input array not pinned");
+            return JNI_FALSE;
+        }
+    }
+    status = H5Pget_core((hid_t)plist, (size_t *)theArray);
+
+    if (status < 0) {
+        if (increment != NULL) {
+            (*env)->ReleaseIntArrayElements(env,increment,theArray,JNI_ABORT);
+        }
+        return JNI_FALSE;
+    } else {
+        if (increment != NULL) {
+            (*env)->ReleaseIntArrayElements(env,increment,theArray,0);
+        }
+        return JNI_TRUE;
+    }
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_split
+ * Signature: (ILjava/lang/String;ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1split
+  (JNIEnv *env, jclass clss, jint plist, jstring meta_ext, jint meta_plist,
+  jstring raw_ext, jint raw_plist)
+{
+    herr_t status;
+    char *meta, *raw;
+    jboolean isCopy;
+
+    if (meta_ext == NULL) {
+        meta = (char *)NULL;
+    } else {
+        meta = (char *)(*env)->GetStringUTFChars(env,meta_ext,&isCopy);
+        if (meta == NULL) {
+            h5JNIFatalError(env,  "H5Pset_split:  meta not pinned");
+            return -1;
+        }
+    }
+
+    if (raw_ext == NULL) {
+        raw = (char *)NULL;
+    } else {
+        raw = (char *)(*env)->GetStringUTFChars(env,raw_ext,&isCopy);
+        if (raw == NULL) {
+            if (meta_ext != NULL) {
+                (*env)->ReleaseStringUTFChars(env,meta_ext,meta);
+            }
+            h5JNIFatalError(env,  "H5Pset_split:  raw not pinned");
+            return -1;
+        }
+    }
+
+    status = H5Pset_split((hid_t)plist, meta, (hid_t)meta_plist, raw, (hid_t)raw_plist);
+    if (raw_ext != NULL) {
+        (*env)->ReleaseStringUTFChars(env,raw_ext,raw);
+    }
+    if (meta_ext != NULL) {
+        (*env)->ReleaseStringUTFChars(env,meta_ext,meta);
+    }
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_split
+ * Signature: (II[Ljava/lang/String;[II[Ljava/lang/String;[I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1split
+  (JNIEnv *env, jclass clss, jint plist, jint meta_ext_size, jobjectArray meta_ext,
+  jintArray meta_properties, jint raw_ext_size, jobjectArray raw_ext,
+  jintArray raw_properties)
+{
+    jint status;
+    jint *metaArray, *rawArray;
+    jboolean isCopy;
+    char *meta = NULL, *raw = NULL;
+    jstring meta_str, raw_str;
+
+    if (meta_ext == NULL) {
+        metaArray = NULL;
+    } else {
+        if (meta_ext_size <= 0) {
+            h5badArgument( env, "H5Pget_split:  meta_ext_size <= 0");
+            return -1;
+        }
+        meta = (char *)malloc(sizeof(char)*meta_ext_size);
+        if (meta == NULL) {
+            h5JNIFatalError(env,  "H5Pget_split:  meta not pinned");
+            return -1;
+        }
+    }
+    if (raw_ext == NULL ) {
+        rawArray = NULL;
+    } else {
+        if (raw_ext_size <=0 ) {
+            h5badArgument( env, "H5Pget_split:  raw_ext_size <=0");
+            return -1;
+        }
+        raw = (char *)malloc(sizeof(char)*raw_ext_size);
+        if (raw == NULL) {
+            free(meta);
+            h5JNIFatalError(env,  "H5Pget_split:  raw not pinned");
+            return -1;
+        }
+    }
+    metaArray = (jint *)(*env)->GetIntArrayElements(env,meta_properties,&isCopy);
+    if (metaArray == NULL) {
+        free(raw);
+        free(meta);
+        h5JNIFatalError(env,  "H5Pget_split:  metaArray not pinned");
+        return -1;
+    }
+    rawArray = (jint *)(*env)->GetIntArrayElements(env,raw_properties,&isCopy);
+    if (rawArray == NULL) {
+        (*env)->ReleaseIntArrayElements(env,meta_properties,metaArray,JNI_ABORT);
+        free(raw);
+        free(meta);
+        h5JNIFatalError(env,  "H5Pget_split:  rawArray not pinned");
+        return -1;
+    }
+
+    status = H5Pget_split((hid_t)plist, (size_t)meta_ext_size, meta,
+        (hid_t *)metaArray, (size_t)raw_ext_size, raw, (hid_t *)rawArray);
+
+    if (status < 0)
+    {
+        (*env)->ReleaseIntArrayElements(env,raw_properties,rawArray,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,meta_properties,metaArray,JNI_ABORT);
+        free(raw);
+        free(meta);
+        h5libraryError(env);
+    }
+    else
+    {
+        (*env)->ReleaseIntArrayElements(env,raw_properties,rawArray,0);
+        (*env)->ReleaseIntArrayElements(env,meta_properties,metaArray,0);
+        /*  NewStringUTF may throw OutOfMemoryError */
+        meta_str = (*env)->NewStringUTF(env,meta);
+        if (meta_str == NULL) {
+            free(raw);
+            free(meta);
+            h5JNIFatalError(env,  "H5Pget_split:  return meta_str not pinned");
+            return -1;
+        }
+        /*  SetObjectArrayElement may throw exceptions */
+        (*env)->SetObjectArrayElement(env,meta_ext,0,(jobject)meta_str);
+        free(meta);
+        /*  NewStringUTF may throw OutOfMemoryError */
+        raw_str = (*env)->NewStringUTF(env,raw);
+        if (raw_str == NULL) {
+            free(raw);
+            h5JNIFatalError(env,  "H5Pget_split:  return raw_str not pinned");
+            return -1;
+        }
+        /*  SetObjectArrayElement may throw exceptions */
+        (*env)->SetObjectArrayElement(env,raw_ext,0,(jobject)raw_str);
+        free(raw);
+    }
+
+    return (jint)status;
+
+}
+#endif
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_cache
+ * Signature: (IIIID)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1cache
+  (JNIEnv *env, jclass clss, jint plist, jint mdc_nelmts, jint rdcc_nelmts,
+  jint rdcc_nbytes, jdouble rdcc_w0)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_cache((hid_t)plist, (int)mdc_nelmts, (int)rdcc_nelmts,
+        (size_t)rdcc_nbytes, (double) rdcc_w0);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_cache
+ * Signature: (I[I[I[I[D)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1cache
+  (JNIEnv *env, jclass clss, jint plist, jintArray mdc_nelmts,
+  jintArray rdcc_nelmts, jintArray rdcc_nbytes, jdoubleArray rdcc_w0)
+{
+    herr_t status;
+    jint mode;
+    jdouble *w0Array;
+    jint *mdc_nelmtsArray, *rdcc_nelmtsArray, *nbytesArray;
+    jboolean isCopy;
+
+    if (mdc_nelmts == NULL) {
+        h5nullArgument( env, "H5Pget_gache:  mdc_nelmts is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    mdc_nelmtsArray = (jint *)env->GetIntArrayElements(mdc_nelmts,&isCopy);
+#else
+    mdc_nelmtsArray = (jint *)(*env)->GetIntArrayElements(env,mdc_nelmts,&isCopy);
+#endif
+    if (mdc_nelmtsArray == NULL) {
+        h5JNIFatalError(env,  "H5Pget_cache:  mdc_nelmts array not pinned");
+        return -1;
+    }
+
+    if (rdcc_w0 == NULL) {
+        w0Array = (jdouble *)NULL;
+    } else {
+#ifdef __cplusplus
+        w0Array = (jdouble *)env->GetDoubleArrayElements(rdcc_w0,&isCopy);
+#else
+        w0Array = (jdouble *)(*env)->GetDoubleArrayElements(env,rdcc_w0,&isCopy);
+#endif
+        if (w0Array == NULL) {
+#ifdef __cplusplus
+            env->ReleaseIntArrayElements(mdc_nelmts,mdc_nelmtsArray,JNI_ABORT);
+#else
+            (*env)->ReleaseIntArrayElements(env,mdc_nelmts,mdc_nelmtsArray,JNI_ABORT);
+#endif
+            h5JNIFatalError(env,  "H5Pget_cache:  w0_array array not pinned");
+            return -1;
+        }
+    }
+
+    if (rdcc_nelmts == NULL) {
+        rdcc_nelmtsArray = (jint *) NULL;
+    } else {
+#ifdef __cplusplus
+        rdcc_nelmtsArray = (jint *)env->GetIntArrayElements(rdcc_nelmts,&isCopy);
+#else
+        rdcc_nelmtsArray = (jint *)(*env)->GetIntArrayElements(env,rdcc_nelmts,&isCopy);
+#endif
+        if (rdcc_nelmtsArray == NULL) {
+#ifdef __cplusplus
+            env->ReleaseIntArrayElements(mdc_nelmts,mdc_nelmtsArray,JNI_ABORT);
+#else
+            (*env)->ReleaseIntArrayElements(env,mdc_nelmts,mdc_nelmtsArray,JNI_ABORT);
+#endif
+            /* exception -- out of memory */
+            if (w0Array != NULL) {
+#ifdef __cplusplus
+                env->ReleaseDoubleArrayElements(rdcc_w0,w0Array,JNI_ABORT);
+#else
+                (*env)->ReleaseDoubleArrayElements(env,rdcc_w0,w0Array,JNI_ABORT);
+#endif
+            }
+            h5JNIFatalError(env,  "H5Pget_cache:  rdcc_nelmts array not pinned");
+            return -1;
+        }
+    }
+
+    if (rdcc_nbytes == NULL) {
+        nbytesArray = (jint *) NULL;
+    } else {
+#ifdef __cplusplus
+        nbytesArray = (jint *)env->GetIntArrayElements(rdcc_nbytes,&isCopy);
+#else
+        nbytesArray = (jint *)(*env)->GetIntArrayElements(env,rdcc_nbytes,&isCopy);
+#endif
+        if (nbytesArray == NULL) {
+#ifdef __cplusplus
+            env->ReleaseIntArrayElements(mdc_nelmts,mdc_nelmtsArray,JNI_ABORT);
+            if (w0Array != NULL) {
+                env->ReleaseDoubleArrayElements(rdcc_w0,w0Array,JNI_ABORT);
+            }
+            if (rdcc_nelmtsArray != NULL) {
+                env->ReleaseIntArrayElements(rdcc_nelmts,rdcc_nelmtsArray,JNI_ABORT);
+            }
+#else
+            (*env)->ReleaseIntArrayElements(env,mdc_nelmts,mdc_nelmtsArray,JNI_ABORT);
+            if (w0Array != NULL) {
+                (*env)->ReleaseDoubleArrayElements(env,rdcc_w0,w0Array,JNI_ABORT);
+            }
+            if (rdcc_nelmtsArray != NULL) {
+                (*env)->ReleaseIntArrayElements(env,rdcc_nelmts,rdcc_nelmtsArray,JNI_ABORT);
+            }
+#endif
+            h5JNIFatalError(env,  "H5Pget_cache:  nbytesArray array not pinned");
+            return -1;
+        }
+    }
+
+    status = H5Pget_cache((hid_t)plist, (int *)mdc_nelmtsArray, (size_t *)rdcc_nelmtsArray, (size_t *)nbytesArray,
+        w0Array);
+
+    if (status < 0) {
+        mode = JNI_ABORT;
+    } else {
+        mode = 0; /* commit and free */
+    }
+
+#ifdef __cplusplus
+    env->ReleaseIntArrayElements(mdc_nelmts,mdc_nelmtsArray,mode);
+#else
+    (*env)->ReleaseIntArrayElements(env,mdc_nelmts,mdc_nelmtsArray,mode);
+#endif
+
+    if (rdcc_nelmtsArray != NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(rdcc_nelmts,rdcc_nelmtsArray,mode);
+#else
+        (*env)->ReleaseIntArrayElements(env,rdcc_nelmts,rdcc_nelmtsArray,mode);
+#endif
+    }
+
+    if (nbytesArray != NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(rdcc_nbytes,nbytesArray,mode);
+#else
+        (*env)->ReleaseIntArrayElements(env,rdcc_nbytes,nbytesArray,mode);
+#endif
+    }
+
+    if (w0Array != NULL) {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(rdcc_w0,w0Array,mode);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,rdcc_w0,w0Array,mode);
+#endif
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+#ifdef notdef
+
+/* DON'T IMPLEMENT THIS!!! */
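+/*
+ * Note (editorial): H5Pset_buffer stores the supplied type-conversion and
+ * background buffer pointers in the property list for use by a later
+ * H5Dread/H5Dwrite, but this wrapper releases the pinned Java arrays before
+ * returning, which would leave the library holding dangling pointers.
+ */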
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_buffer
+ * Signature: (II[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1buffer
+  (JNIEnv *env, jclass clss, jint plist, jint size, jbyteArray tconv, jbyteArray bkg)
+{
+    jint status;
+    jbyte *tconvP, *bkgP;
+    jboolean isCopy;
+
+    if (tconv == NULL)
+        tconvP = (jbyte *)NULL;
+    else
+        tconvP = (*env)->GetByteArrayElements(env,tconv,&isCopy);
+    if (bkg == NULL)
+        bkgP = (jbyte *)NULL;
+    else
+        bkgP = (*env)->GetByteArrayElements(env,bkg,&isCopy);
+
+    status = H5Pset_buffer((hid_t)plist, (size_t)size, tconvP, bkgP);
+
+    if (tconv != NULL)
+        (*env)->ReleaseByteArrayElements(env,tconv,tconvP,JNI_ABORT);
+    if (bkg != NULL)
+        (*env)->ReleaseByteArrayElements(env,bkg,bkgP,JNI_ABORT);
+
+    return status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_buffer
+ * Signature: (I[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1buffer
+  (JNIEnv *env, jclass clss, jint plist, jbyteArray tconv, jbyteArray bkg)
+{
+    jint status;
+    jbyte *tconvP, *bkgP;
+    jboolean isCopy;
+
+    tconvP = (*env)->GetByteArrayElements(env,tconv,&isCopy);
+    bkgP = (*env)->GetByteArrayElements(env,bkg,&isCopy);
+    status = H5Pget_buffer((hid_t)plist, tconvP, bkgP);
+
+    if (status < 0)
+    {
+        (*env)->ReleaseByteArrayElements(env,tconv,tconvP,JNI_ABORT);
+        (*env)->ReleaseByteArrayElements(env,bkg,bkgP,JNI_ABORT);
+    }
+    else
+    {
+        (*env)->ReleaseByteArrayElements(env,tconv,tconvP,0);
+        (*env)->ReleaseByteArrayElements(env,bkg,bkgP,0);
+    }
+
+    return status;
+}
+#endif
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_preserve
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1preserve
+  (JNIEnv *env, jclass clss, jint plist, jboolean status)
+{
+    hbool_t st;
+    herr_t retVal = -1;
+
+    if (status == JNI_TRUE) {
+        st = TRUE;
+    } else if (status == JNI_FALSE) {
+        st = FALSE;
+    } else {
+        /* exception -- bad argument */
+        h5badArgument( env, "H5Pset_preserve:  status not TRUE or FALSE");
+        return -1;
+    }
+    retVal =  H5Pset_preserve((hid_t)plist, (hbool_t)st);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_preserve
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1preserve
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retValue;
+    retValue = H5Pget_preserve((hid_t)plist);
+    if (retValue < 0) {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_deflate
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1deflate
+  (JNIEnv *env, jclass clss, jint plist, jint level)
+{
+    herr_t retValue;
+    retValue = H5Pset_deflate((hid_t)plist, (int)level);
+    if (retValue < 0) {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_nbit
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1nbit
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retValue;
+    retValue = H5Pset_nbit((hid_t)plist);
+    if (retValue < 0) {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_scaleoffset
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1scaleoffset
+  (JNIEnv *env, jclass clss, jint plist, jint scale_type, jint scale_factor)
+{
+    herr_t retValue;
+    retValue = H5Pset_scaleoffset((hid_t)plist, scale_type, scale_factor);
+    if (retValue < 0) {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_gc_references
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1gc_1references
+  (JNIEnv *env, jclass clss, jint fapl_id, jboolean gc_ref)
+  {
+    herr_t retVal;
+    unsigned gc_ref_val;
+    if (gc_ref == JNI_TRUE) {
+        gc_ref_val = 1;
+    } else {
+        gc_ref_val = 0;
+    }
+    retVal = H5Pset_gc_references((hid_t)fapl_id, gc_ref_val);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+    return (jint)retVal;
+}
+
+#ifdef remove
+#ifdef USE_H5_1_2_1
+#define GET_GC H5Pget_gc_reference
+#else
+#define GET_GC H5Pget_gc_references
+#endif
+#endif
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_gc_references
+ * Signature: (I[Z)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1gc_1references
+  (JNIEnv *env, jclass clss, jint fapl_id, jbooleanArray gc_ref)
+{
+    herr_t status;
+    jboolean *theArray;
+    jboolean isCopy;
+    unsigned gc_ref_val = 0;
+
+    if (gc_ref == NULL) {
+        h5nullArgument( env, "H5Pget_gc_references:  gc_ref input array is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    theArray = (jboolean *)env->GetBooleanArrayElements(gc_ref,&isCopy);
+#else
+    theArray = (jboolean *)(*env)->GetBooleanArrayElements(env,gc_ref,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_gc_references:  gc_ref not pinned");
+        return -1;
+    }
+
+    status = H5Pget_gc_references((hid_t)fapl_id, (unsigned *)&gc_ref_val);
+#ifdef removed
+    status = GET_GC((hid_t)fapl_id, (unsigned *)&gc_ref_val);
+#endif
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseBooleanArrayElements(gc_ref,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseBooleanArrayElements(env,gc_ref,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        if (gc_ref_val == 1) {
+            theArray[0] = JNI_TRUE;
+        } else {
+            theArray[0] = JNI_FALSE;
+        }
+#ifdef __cplusplus
+        env->ReleaseBooleanArrayElements(gc_ref,theArray,0);
+#else
+        (*env)->ReleaseBooleanArrayElements(env,gc_ref,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_btree_ratios
+ * Signature: (IDDD)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1btree_1ratios
+  (JNIEnv *env, jclass clss, jint plist_id, jdouble left, jdouble middle, jdouble right)
+{
+    herr_t status;
+
+    status = H5Pset_btree_ratios((hid_t)plist_id, (double)left,(double)middle, (double)right);
+
+    if (status < 0) {
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_btree_ratios
+ * Signature: (I[D[D[D)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1btree_1ratios
+  (JNIEnv *env, jclass clss, jint plist_id, jdoubleArray left, jdoubleArray middle, jdoubleArray right)
+{
+    herr_t status;
+    jdouble *leftP, *middleP, *rightP;
+    jboolean isCopy;
+
+    if (left == NULL) {
+        h5nullArgument( env, "H5Pget_btree_ratios:  left input array is NULL");
+        return -1;
+    }
+
+    if (middle == NULL) {
+        h5nullArgument( env, "H5Pget_btree_ratios:  middle input array is NULL");
+        return -1;
+    }
+
+    if (right == NULL) {
+        h5nullArgument( env, "H5Pget_btree_ratios:  right input array is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    leftP = (jdouble *)env->GetDoubleArrayElements(left,&isCopy);
+#else
+    leftP = (jdouble *)(*env)->GetDoubleArrayElements(env,left,&isCopy);
+#endif
+    if (leftP == NULL) {
+        h5JNIFatalError( env, "H5Pget_btree_ratios:  left not pinned");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    middleP = (jdouble *)env->GetDoubleArrayElements(middle,&isCopy);
+#else
+    middleP = (jdouble *)(*env)->GetDoubleArrayElements(env,middle,&isCopy);
+#endif
+    if (middleP == NULL) {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(left,leftP,JNI_ABORT);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,left,leftP,JNI_ABORT);
+#endif
+        h5JNIFatalError( env, "H5Pget_btree_ratios:  middle not pinned");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    rightP = (jdouble *)env->GetDoubleArrayElements(right,&isCopy);
+#else
+    rightP = (jdouble *)(*env)->GetDoubleArrayElements(env,right,&isCopy);
+#endif
+    if (rightP == NULL) {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(left,leftP,JNI_ABORT);
+        env->ReleaseDoubleArrayElements(middle,middleP,JNI_ABORT);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,left,leftP,JNI_ABORT);
+        (*env)->ReleaseDoubleArrayElements(env,middle,middleP,JNI_ABORT);
+#endif
+        h5JNIFatalError( env, "H5Pget_btree_ratios:  middle not pinned");
+        return -1;
+    }
+
+    status = H5Pget_btree_ratios((hid_t)plist_id, (double *)leftP,
+        (double *)middleP, (double *)rightP);
+
+    if (status < 0)  {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(left,leftP,JNI_ABORT);
+        env->ReleaseDoubleArrayElements(middle,middleP,JNI_ABORT);
+        env->ReleaseDoubleArrayElements(right,rightP,JNI_ABORT);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,left,leftP,JNI_ABORT);
+        (*env)->ReleaseDoubleArrayElements(env,middle,middleP,JNI_ABORT);
+        (*env)->ReleaseDoubleArrayElements(env,right,rightP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+#ifdef __cplusplus
+        env->ReleaseDoubleArrayElements(left,leftP,0);
+        env->ReleaseDoubleArrayElements(middle,middleP,0);
+        env->ReleaseDoubleArrayElements(right,rightP,0);
+#else
+        (*env)->ReleaseDoubleArrayElements(env,left,leftP,0);
+        (*env)->ReleaseDoubleArrayElements(env,middle,middleP,0);
+        (*env)->ReleaseDoubleArrayElements(env,right,rightP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_small_data_block_size
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1small_1data_1block_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong size)
+{
+    long sz;
+    herr_t retVal = -1;
+    sz = (long)size;
+    retVal =  H5Pset_small_data_block_size((hid_t)plist, (hsize_t)sz );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_small_data_block_size
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1small_1data_1block_1size
+  (JNIEnv *env, jclass clss, jint plist, jlongArray size)
+{
+    herr_t status;
+    jlong *theArray;
+    jboolean isCopy;
+    hsize_t s;
+
+    if (size == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Pget_small_user_block_size:  size is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jlong *)env->GetLongArrayElements(size,&isCopy);
+#else
+    theArray = (jlong *)(*env)->GetLongArrayElements(env,size,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_userblock:  size not pinned");
+        return -1;
+    }
+
+    status = H5Pget_small_data_block_size((hid_t)plist, &s);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(size,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,size,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = s;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(size,theArray,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,size,theArray,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+/***************************************************************
+ *                   New APIs for HDF5.1.6                     *
+ ***************************************************************/
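+/*
+ * Illustrative sketch (not part of the bindings): typical C-side use of the
+ * HDF5 1.6 dataset-creation properties wrapped below.  The constants and
+ * calls are standard HDF5; the surrounding code is hypothetical.
+ *
+ *     hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ *     H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);  // allocate at create time
+ *     H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC);    // write fill values on allocation
+ *     H5D_fill_value_t fv;
+ *     H5Pfill_value_defined(dcpl, &fv);               // query fill-value status
+ *     H5Pclose(dcpl);
+ */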
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_alloc_time(hid_t plist_id, H5D_alloc_time_t alloc_time )
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1alloc_1time
+  (JNIEnv *env, jclass clss, jint plist, jint alloc_time)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_alloc_time((hid_t)plist, (H5D_alloc_time_t)alloc_time );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_alloc_time(hid_t plist_id, H5D_alloc_time_t *alloc_time )
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1alloc_1time
+  (JNIEnv *env, jclass clss, jint plist, jintArray alloc_time)
+{
+    herr_t retVal = -1;
+    jint *theArray;
+    jboolean isCopy;
+    H5D_alloc_time_t time;
+
+
+    if (alloc_time == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Pget_alloc_time:  alloc_time is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jint *)env->GetIntArrayElements(alloc_time,&isCopy);
+#else
+    theArray = (jint *)(*env)->GetIntArrayElements(env,alloc_time,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_alloc_time:  alloc_time not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget_alloc_time((hid_t)plist, &time );
+
+    if (retVal < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(alloc_time,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,alloc_time,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = time;
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(alloc_time,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,alloc_time,theArray,0);
+#endif
+    }
+
+    return (jint)retVal;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fill_time(hid_t plist_id, H5D_fill_time_t fill_time )
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fill_1time
+  (JNIEnv *env, jclass clss, jint plist, jint fill_time)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_fill_time((hid_t)plist, (H5D_fill_time_t)fill_time );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fill_time(hid_t plist_id, H5D_fill_time_t *fill_time )
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1fill_1time
+  (JNIEnv *env, jclass clss, jint plist, jintArray fill_time)
+{
+    herr_t retVal = -1;
+    jint *theArray;
+    jboolean isCopy;
+    H5D_fill_time_t time;
+
+
+    if (fill_time == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Pget_fill_time:  fill_time is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jint *)env->GetIntArrayElements(fill_time,&isCopy);
+#else
+    theArray = (jint *)(*env)->GetIntArrayElements(env,fill_time,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_fill_time:  fill_time not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget_fill_time((hid_t)plist, &time );
+
+
+    if (retVal < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(fill_time,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,fill_time,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = time;
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(fill_time,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,fill_time,theArray,0);
+#endif
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pfill_value_defined(hid_t plist_id, H5D_fill_value_t *status )
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pfill_1value_1defined
+  (JNIEnv *env, jclass clss, jint plist, jintArray status)
+{
+    herr_t retVal = -1;
+    jint *theArray;
+    jboolean isCopy;
+    H5D_fill_value_t value;
+
+
+    if (status == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Pfill_value_defined:  status is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    theArray = (jint *)env->GetIntArrayElements(status,&isCopy);
+#else
+    theArray = (jint *)(*env)->GetIntArrayElements(env,status,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pfill_value_defined:  status not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pfill_value_defined((hid_t)plist, &value );
+
+    if (retVal < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(status,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,status,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = value;
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(status,theArray,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,status,theArray,0);
+#endif
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fletcher32(hid_t plist)
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fletcher32
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_fletcher32((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_edc_check(hid_t plist, H5Z_EDC_t check)
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1edc_1check
+  (JNIEnv *env, jclass clss, jint plist, jint check)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_edc_check((hid_t)plist, (H5Z_EDC_t)check );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_edc_check(hid_t plist)
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1edc_1check
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    H5Z_EDC_t retVal = -1;
+
+    retVal =  H5Pget_edc_check((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_shuffle(hid_t plist_id)
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1shuffle
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_shuffle((hid_t)plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_szip(hid_t plist, unsigned int options_mask, unsigned int pixels_per_block)
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1szip
+  (JNIEnv *env, jclass clss, jint plist, jint options_mask, jint pixels_per_block)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_szip((hid_t)plist, (unsigned int)options_mask, (unsigned int)pixels_per_block);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_hyper_vector_size(hid_t dxpl_id, size_t vector_size )
+ * Signature: (IJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1hyper_1vector_1size
+  (JNIEnv *env, jclass clss, jint plist, jlong vector_size)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pset_hyper_vector_size((hid_t)plist, (size_t)vector_size);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_hyper_vector_size(hid_t dxpl_id, size_t *vector_size )
+ * Signature: (I[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1hyper_1vector_1size
+  (JNIEnv *env, jclass clss, jint plist, jlongArray vector_size)
+{
+    herr_t retVal = -1;
+    jlong *theArray;
+    size_t size;
+    jboolean isCopy;
+
+    if (vector_size == NULL) {
+        /* exception ? */
+        h5nullArgument( env, "H5Pget_hyper_vector_size:  vector_size is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    theArray = (jlong *)env->GetLongArrayElements(vector_size,&isCopy);
+#else
+    theArray = (jlong *)(*env)->GetLongArrayElements(env,vector_size,&isCopy);
+#endif
+    if (theArray == NULL) {
+        h5JNIFatalError( env, "H5Pget_hyper_vector_size:  vector_size not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget_hyper_vector_size((hid_t)plist, &size);
+
+    if (retVal < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(vector_size,theArray,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,vector_size,theArray,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        theArray[0] = size;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(vector_size,theArray,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,vector_size,theArray,0);
+#endif
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pall_filters_avail(hid_t dcpl_id)
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pall_1filters_1avail
+  (JNIEnv *env, jclass clss, jint dcpl_id)
+{
+    htri_t bval;
+    bval = H5Pall_filters_avail((hid_t)dcpl_id);
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pmodify_filter(hid_t plist, H5Z_filter_t filter,
+ *                unsigned int flags, size_t cd_nelmts, const unsigned int cd_values[] )
+ * Signature: (IIIJ[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pmodify_1filter
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jint flags,
+  jlong cd_nelmts, jintArray cd_values)
+{
+    herr_t status;
+    jint *cd_valuesP;
+    jboolean isCopy;
+
+    if (cd_values == NULL) {
+        h5nullArgument( env, "H5Pmodify_filter:  cd_values is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    cd_valuesP = env->GetIntArrayElements(cd_values,&isCopy);
+#else
+    cd_valuesP = (*env)->GetIntArrayElements(env,cd_values,&isCopy);
+#endif
+
+    if (cd_valuesP == NULL) {
+        h5JNIFatalError(env,  "H5Pmodify_filter:  cd_values not pinned");
+        return -1;
+    }
+
+    status = H5Pmodify_filter((hid_t)plist, (H5Z_filter_t)filter,(const unsigned int)flags,
+        (size_t)cd_nelmts, (unsigned int *)cd_valuesP);
+
+#ifdef __cplusplus
+    env->ReleaseIntArrayElements(cd_values, cd_valuesP, JNI_ABORT);
+#else
+    (*env)->ReleaseIntArrayElements(env, cd_values, cd_valuesP, JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_filter_by_id( hid_t plist_id, H5Z_filter_t filter,
+ *                unsigned int *flags, size_t *cd_nelmts, unsigned int cd_values[],
+ *                size_t namelen, char *name[] )
+ * Signature: (II[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1filter_1by_1id
+  (JNIEnv *env, jclass clss, jint plist, jint filter, jintArray flags,
+  jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name)
+{
+    herr_t status;
+    int i=0;
+    jint *cd_valuesP, *flagsP;
+    jlong *cd_nelmsP;
+    jboolean isCopy;
+    size_t *nelmsP;
+    int rank;
+    long bs;
+    char *aName;
+    jstring str;
+
+    bs = (long)namelen;
+    if (bs <= 0) {
+        h5badArgument( env, "H5Pget_filter_by_id:  namelen <= 0");
+        return -1;
+    }
+
+    if (flags == NULL) {
+        h5nullArgument( env, "H5Pget_filter_by_id:  flags is NULL");
+        return -1;
+    }
+
+    if (cd_nelmts == NULL) {
+        h5nullArgument( env, "H5Pget_filter_by_id:  cd_nelms is NULL");
+        return -1;
+    }
+
+    if (cd_values == NULL) {
+        h5nullArgument( env, "H5Pget_filter_by_id:  cd_values is NULL");
+        return -1;
+    }
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pget_filter_by_id:  name is NULL");
+        return -1;
+    }
+
+    aName = (char*)malloc(sizeof(char)*bs);
+    if (aName == NULL) {
+        h5outOfMemory( env, "H5Pget_filter_by_id:  malloc failed");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    flagsP = env->GetIntArrayElements(flags,&isCopy);
+#else
+    flagsP = (*env)->GetIntArrayElements(env,flags,&isCopy);
+#endif
+
+    if (flagsP == NULL) {
+        free(aName);
+        h5JNIFatalError(env,  "H5Pget_filter_by_id:  flags not pinned");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    cd_nelmsP = env->GetLongArrayElements(cd_nelmts,&isCopy);
+#else
+    cd_nelmsP = (*env)->GetLongArrayElements(env,cd_nelmts,&isCopy);
+#endif
+
+    if (cd_nelmsP == NULL) {
+        free(aName);
+        h5JNIFatalError(env,  "H5Pget_filter_by_id:  cd_nelms not pinned");
+        return -1;
+    }
+
+    nelmsP = (size_t *)malloc( sizeof(size_t));
+
+    if (nelmsP == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(flags,flagsP,JNI_ABORT);
+        env->ReleaseLongArrayElements(cd_nelmts,cd_nelmsP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,flags,flagsP,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,cd_nelmts,cd_nelmsP,JNI_ABORT);
+#endif
+        free(aName);
+        h5outOfMemory(env,  "H5Pget_filter_by_id:  malloc of cd_nelmts work buffer failed.");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    cd_valuesP = env->GetIntArrayElements(cd_values,&isCopy);
+    rank  = env->GetArrayLength(cd_values);
+#else
+    cd_valuesP = (*env)->GetIntArrayElements(env,cd_values,&isCopy);
+    rank  = (*env)->GetArrayLength(env, cd_values);
+#endif
+
+    if (cd_valuesP == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(flags,flagsP,JNI_ABORT);
+        env->ReleaseLongArrayElements(cd_nelmts,cd_nelmsP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,flags,flagsP,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,cd_nelmts,cd_nelmsP,JNI_ABORT);
+#endif
+        free(aName);
+        free(nelmsP);
+        h5JNIFatalError(env,  "H5Pget_filter_by_id:  cd_values array not pinned.");
+        return -1;
+    }
+
+    status = H5Pget_filter_by_id( (hid_t)plist, (H5Z_filter_t)filter,
+        (unsigned int *)flagsP, (size_t *)nelmsP, (unsigned int *)cd_valuesP,
+        (size_t)namelen, (char *)aName, (unsigned int*)NULL);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(flags,flagsP,JNI_ABORT);
+        env->ReleaseLongArrayElements(cd_nelmts,cd_nelmsP,JNI_ABORT);
+        env->ReleaseIntArrayElements(cd_values,cd_valuesP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,flags,flagsP,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,cd_nelmts,cd_nelmsP,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,cd_values,cd_valuesP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+        cd_nelmsP[0] = nelmsP[0];
+#ifdef __cplusplus
+        /*  NewStringUTF may throw OutOfMemoryError */
+        str = env->NewStringUTF(aName);
+        env->ReleaseIntArrayElements(flags,flagsP,0);
+        env->ReleaseLongArrayElements(cd_nelmts,cd_nelmsP,0);
+        env->ReleaseIntArrayElements(cd_values,cd_valuesP,0);
+        /*  SetObjectArrayElement may throw exceptions */
+        env->SetObjectArrayElement(name,0,(jobject)str);
+#else
+        /*  NewStringUTF may throw OutOfMemoryError */
+        str = (*env)->NewStringUTF(env, aName);
+        (*env)->ReleaseIntArrayElements(env,flags,flagsP,0);
+        (*env)->ReleaseLongArrayElements(env,cd_nelmts,cd_nelmsP,0);
+        (*env)->ReleaseIntArrayElements(env,cd_values,cd_valuesP,0);
+        /*  SetObjectArrayElement may throw exceptions */
+        (*env)->SetObjectArrayElement(env,name,0,(jobject)str);
+#endif
+    }
+
+    free(aName);
+    free(nelmsP);
+
+    return (jint)status;
+}
+
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_fclose_degree
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fclose_1degree
+  (JNIEnv *env, jclass clss, jint plist, jint fc_degree)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_fclose_degree((hid_t) plist, (H5F_close_degree_t) fc_degree);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_fclose_degree
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1fclose_1degree
+  (JNIEnv *env, jclass clss, jint plist)
+{
+    H5F_close_degree_t degree;
+    herr_t retVal = -1;
+    retVal =  H5Pget_fclose_degree((hid_t) plist, &degree);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)degree;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *                    File access properties                          *
+ *                                                                    *
+ **********************************************************************/
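+/*
+ * Illustrative sketch (not part of the bindings): how the file-access
+ * drivers wrapped below are used from C.  The calls are standard HDF5; the
+ * file name and sizes are hypothetical.
+ *
+ *     hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
+ *     H5Pset_fapl_core(fapl, 1 << 20, 1);  // in-memory file, 1 MiB increments,
+ *                                          // with contents written to disk on close
+ *     hid_t file = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ *     H5Fclose(file);
+ *     H5Pclose(fapl);
+ */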
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pset_fapl_family ( hid_t fapl_id, hsize_t memb_size, hid_t memb_fapl_id )
+ * Purpose:   Sets the file access property list to use the family driver
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fapl_1family
+  (JNIEnv *env, jclass clss, jint plist, jlong memb_size, jint memb_plist)
+{
+    long ms;
+    herr_t retVal = -1;
+    ms = (long)memb_size;
+    retVal =  H5Pset_fapl_family((hid_t)plist, (hsize_t)ms, (hid_t)memb_plist);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pget_fapl_family ( hid_t fapl_id, hsize_t *memb_size, hid_t *memb_fapl_id )
+ * Purpose:   Returns file access property list information
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1family
+  (JNIEnv *env, jclass clss, jint tid, jlongArray memb_size, jintArray memb_plist)
+{
+    herr_t status;
+    jlong *sizeArray;
+    jint *plistArray;
+    jboolean isCopy;
+    hsize_t *sa;
+    int i;
+    int rank;
+
+    if (memb_size == NULL) {
+        h5nullArgument( env, "H5Pget_family:  memb_size is NULL");
+        return -1;
+    }
+    if (memb_plist == NULL) {
+        h5nullArgument( env, "H5Pget_family:  memb_plist is NULL");
+        return -1;
+    }
+    sizeArray = (jlong *)(*env)->GetLongArrayElements(env,memb_size,&isCopy);
+    if (sizeArray == NULL) {
+        h5JNIFatalError(env,  "H5Pget_family:  sizeArray not pinned");
+        return -1;
+    }
+    rank  = (*env)->GetArrayLength(env, memb_size);
+    sa = (hsize_t *)malloc( rank * sizeof(hsize_t));
+    if (sa == NULL) {
+        (*env)->ReleaseLongArrayElements(env,memb_size,sizeArray,JNI_ABORT);
+        h5outOfMemory(env,  "H5Screate-simple:  dims not converted to hsize_t");
+        return -1;
+    }
+    plistArray = (jint *)(*env)->GetIntArrayElements(env,memb_plist,&isCopy);
+    if (plistArray == NULL) {
+        (*env)->ReleaseLongArrayElements(env,memb_size,sizeArray,JNI_ABORT);
+        h5JNIFatalError(env,  "H5Pget_family:  plistArray not pinned");
+        return -1;
+    }
+    status = H5Pget_fapl_family ((hid_t)tid, sa, (hid_t *)plistArray);
+
+    if (status < 0)
+    {
+        free(sa);
+        (*env)->ReleaseLongArrayElements(env,memb_size,sizeArray,JNI_ABORT);
+        (*env)->ReleaseIntArrayElements(env,memb_plist,plistArray,JNI_ABORT);
+        h5libraryError(env);
+    }
+    else
+    {
+        for (i= 0; i < rank; i++) {
+            sizeArray[i] = sa[i];
+        }
+        free(sa);
+        (*env)->ReleaseLongArrayElements(env,memb_size,sizeArray,0);
+        (*env)->ReleaseIntArrayElements(env,memb_plist,plistArray,0);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pset_fapl_core( hid_t fapl_id, size_t increment, hbool_t backing_store )
+ * Purpose:   Modifies the file access property list to use the H5FD_CORE driver
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fapl_1core
+  (JNIEnv *env, jclass clss, jint fapl_id, jint increment, jboolean backing_store)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_fapl_core( (hid_t) fapl_id, (size_t) increment, (hbool_t) backing_store );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pget_fapl_core( hid_t fapl_id, size_t *increment, hbool_t *backing_store )
+ * Purpose:   Queries core file driver properties
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1fapl_1core
+  (JNIEnv *env, jclass clss, jint fapl_id, jintArray increment, jbooleanArray backing_store)
+{
+    herr_t status;
+    jint *incArray;
+    jboolean *backArray;
+    jboolean isCopy;
+
+    if (increment == NULL) {
+        h5nullArgument( env, "H5Pget_fapl_core:  increment is NULL");
+        return -1;
+    }
+    if (backing_store == NULL) {
+        h5nullArgument( env, "H5Pget_fapl_core:  backing_store is NULL");
+        return -1;
+    }
+
+    incArray = (jint *)(*env)->GetIntArrayElements(env,increment,&isCopy);
+    if (incArray == NULL) {
+        h5JNIFatalError(env,  "H5Pget_fapl_core:  incArray not pinned");
+        return -1;
+    }
+
+    backArray = (jboolean *)(*env)->GetBooleanArrayElements(env,backing_store,&isCopy);
+    if (backArray == NULL) {
+        (*env)->ReleaseIntArrayElements(env,increment,incArray,JNI_ABORT);
+        h5JNIFatalError(env,  "H5Pget_fapl_core:  backArray not pinned");
+        return -1;
+    }
+    status = H5Pget_fapl_core( (hid_t) fapl_id, (size_t *)incArray, (hbool_t *)backArray );
+
+    if (status < 0)
+    {
+        (*env)->ReleaseIntArrayElements(env,increment,incArray,JNI_ABORT);
+        (*env)->ReleaseBooleanArrayElements(env,backing_store,backArray,JNI_ABORT);
+        h5libraryError(env);
+    }
+    else
+    {
+        (*env)->ReleaseIntArrayElements(env,increment,incArray,0);
+        (*env)->ReleaseBooleanArrayElements(env,backing_store,backArray,0);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pset_family_offset ( hid_t fapl_id, hsize_t offset )
+ * Purpose:   Sets offset property for low-level access to a file in a family of files
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1family_1offset
+  (JNIEnv *env, jclass clss, jint fapl_id, jlong offset)
+{
+    herr_t retVal = -1;
+    retVal =  H5Pset_family_offset ( (hid_t) fapl_id, (hsize_t) offset );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pget_family_offset ( hid_t fapl_id, hsize_t *offset )
+ * Purpose:   Retrieves a data offset from the file access property list
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1family_1offset
+  (JNIEnv *env, jclass clss, jint fapl_id)
+{
+    hsize_t offset = -1;
+    herr_t  retVal = -1;
+    retVal =  H5Pget_family_offset ( (hid_t) fapl_id, &offset );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jlong)offset;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature: herr_t H5Pset_fapl_log( hid_t fapl_id, const char *logfile, unsigned int flags, size_t buf_size )
+ * Purpose:   Sets up the use of the logging driver
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1fapl_1log
+  (JNIEnv *env, jclass clss, jint fapl_id, jstring logfile, jint flags, jint buf_size)
+{
+    herr_t retVal = -1;
+    char * pLogfile;
+    jboolean isCopy;
+
+    if (logfile == NULL) {
+        h5nullArgument( env, "H5Pset_fapl_log:  logfile is NULL");
+        return -1;
+    }
+
+    pLogfile = (char *)(*env)->GetStringUTFChars(env,logfile,&isCopy);
+
+    if (pLogfile == NULL) {
+        h5JNIFatalError(env,  "H5Pset_fapl_log:  logfile not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pset_fapl_log( (hid_t) fapl_id, (const char *)pLogfile, (unsigned int) flags, (size_t) buf_size );
+
+    (*env)->ReleaseStringUTFChars(env, logfile, pLogfile);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Signature:  herr_t H5Premove_filter (hid_t obj_id, H5Z_filter_t filter)
+ * Purpose:
+ */
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Premove_1filter
+  (JNIEnv *env, jclass clss, jint obj_id, jint filter)
+{
+    herr_t status;
+
+    status = H5Premove_filter ((hid_t) obj_id, (H5Z_filter_t) filter);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return status;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *  Modified by Peter Cao on July 26, 2006:                           *
+ *      Some of the Generic Property APIs have callback function      *
+ *      pointers, which Java does not support. Only the Generic       *
+ *      Property APIs without function pointers are implemented.      *
+ *                                                                    *
+ **********************************************************************/
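+/*
+ * Illustrative sketch (not part of the bindings): the callback-free subset
+ * of the generic property API wrapped below.  "plist" stands for any valid
+ * property list id and is hypothetical; "layout" is only an example name.
+ *
+ *     size_t nprops, size;
+ *     H5Pget_nprops(plist, &nprops);            // number of properties
+ *     if (H5Pexist(plist, "layout") > 0) {      // is the property registered?
+ *         H5Pget_size(plist, "layout", &size);  // its size in bytes
+ *     }
+ *     char *cls = H5Pget_class_name(H5Pget_class(plist));
+ *     free(cls);                                // caller frees the name
+ */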
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcreate_list
+ * Signature: hid_t H5Pcreate_list( hid_t class)
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pcreate_1list
+  (JNIEnv *env, jclass clss, jint cls)
+{
+    hid_t retVal = -1;
+
+    retVal =  H5Pcreate((hid_t)cls);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset
+ * Signature: herr_t H5Pset( hid_t plid, const char *name, void *value)
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset
+  (JNIEnv *env, jclass clss, jint plid, jstring name, jint val)
+{
+    char* cstr;
+    jboolean isCopy;    
+    hid_t retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pset: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Pset: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pset((hid_t)plid, cstr, &val);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pexist
+ * Signature: htri_t H5Pexist( hid_t id, const char *name )
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pexist
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char* cstr;
+    jboolean isCopy;    
+    hid_t retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pexist: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Pexist: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pexist((hid_t)plid, cstr);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_size
+ * Signature: int H5Pget_size( hid_t id, const char *name, size_t *size ) 
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1size
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char* cstr;
+    jboolean isCopy;    
+    hid_t retVal = -1;
+    size_t size;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pget_size: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Pget_size: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget_size((hid_t)plid, cstr, &size);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong) size;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_nprops
+ * Signature: int H5Pget_nprops( hid_t id, size_t *nprops )  
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1nprops
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    hid_t retVal = -1;
+    size_t nprops;
+
+    retVal =  H5Pget_nprops((hid_t)plid, &nprops);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jlong) nprops;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_class_name
+ * Signature: char * H5Pget_class_name( hid_t pcid ) 
+ */
+JNIEXPORT jstring JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1class_1name
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    char *c_str;
+    jstring j_str;
+
+    c_str =  H5Pget_class_name((hid_t)plid);
+
+    if (c_str == NULL) {
+        h5libraryError(env);
+        return NULL;
+    }
+
+    j_str = (*env)->NewStringUTF(env,c_str);
+    free(c_str);
+    if (j_str == NULL) {
+        h5outOfMemory( env,"H5Pget_class_name: return string failed");
+    }
+
+    return j_str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_class_parent
+ * Signature: hid_t H5Pget_class_parent( hid_t pcid )   
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1class_1parent
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    hid_t retVal = -1;
+
+    retVal =  H5Pget_class_parent((hid_t)plid);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pisa_class
+ * Signature: htri_t H5Pisa_class( hid_t plist, hid_t pclass )    
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pisa_1class
+  (JNIEnv *env, jclass clss, jint plid, jint pcls)
+{
+    htri_t retVal = -1;
+
+    retVal =  H5Pisa_class((hid_t)plid, (hid_t)pcls);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget
+ * Signature: herr_t H5Pget( hid_t plid, const char *name, void *value )
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char* cstr;
+    jboolean isCopy;
+    jint val;    
+    jint retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pget: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Pget: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pget((hid_t)plid, cstr, &val);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)val;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pequal
+ * Signature: htri_t H5Pequal( hid_t id1, hid_t id2 )    
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pequal
+  (JNIEnv *env, jclass clss, jint plid1, jint plid2)
+{
+    htri_t retVal = -1;
+
+    retVal =  H5Pequal((hid_t)plid1, (hid_t)plid2);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcopy_prop
+ * Signature: herr_t H5Pcopy_prop( hid_t dst_id, hid_t src_id, const char *name ) 
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pcopy_1prop
+  (JNIEnv *env, jclass clss, jint dst_plid, jint src_plid, jstring name)
+{
+    char* cstr;
+    jboolean isCopy;
+    jint retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Pcopy_prop: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Pcopy_prop: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Pcopy_prop((hid_t)dst_plid, (hid_t)src_plid, cstr);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Premove
+ * Signature: herr_t H5Premove( hid_t plid, const char *name )
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Premove
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char* cstr;
+    jboolean isCopy;
+    jint retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Premove: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Premove: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Premove((hid_t)plid, cstr);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Punregister
+ * Signature: herr_t H5Punregister( hid_t pclass_id, const char *name )
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Punregister
+  (JNIEnv *env, jclass clss, jint plid, jstring name)
+{
+    char* cstr;
+    jboolean isCopy;
+    jint retVal = -1;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Punregister: name is NULL");
+        return -1;
+    }
+
+    cstr = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+    if (cstr == NULL) {
+        h5JNIFatalError( env, "H5Punregister: name not pinned");
+        return -1;
+    }
+
+    retVal =  H5Punregister((hid_t)plid, cstr);
+
+    (*env)->ReleaseStringUTFChars(env,name,cstr);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pclose_class
+ * Signature: herr_t H5Pclose_class( hid_t plist )
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pclose_1class
+  (JNIEnv *env, jclass clss, jint plid)
+{
+    herr_t retVal = -1;
+
+    retVal =  H5Pclose_class((hid_t)plid);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_create_intermediate_group
+ * Signature: herr_t H5Pset_create_intermediate_group( hid_t lcpl_id, unsigned crt_intermed_group )
+ */
+JNIEXPORT void JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1create_1intermediate_1group
+  (JNIEnv *env, jclass clss, jint lcpl_id, jboolean crt_intermed_group)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_create_intermediate_group((hid_t) lcpl_id, crt_intermed_group ? 1 : 0);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+}
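+
+/*
+ * Illustrative sketch (not part of this wrapper): with a link creation
+ * property list configured this way, H5Gcreate2 creates any missing parent
+ * groups, so creating "/a/b/c" succeeds even if "/a" and "/a/b" do not
+ * exist yet. file_id is assumed to be supplied by the caller.
+ *
+ *   hid_t lcpl = H5Pcreate(H5P_LINK_CREATE);
+ *   H5Pset_create_intermediate_group(lcpl, 1);
+ *   hid_t grp = H5Gcreate2(file_id, "/a/b/c", lcpl, H5P_DEFAULT, H5P_DEFAULT);
+ *   H5Pclose(lcpl);
+ */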
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_create_intermediate_group
+ * Signature: herr_t H5Pget_create_intermediate_group( hid_t lcpl_id, unsigned *crt_intermed_group )
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1create_1intermediate_1group
+  (JNIEnv *env, jclass clss, jint lcpl_id)
+{
+    herr_t retVal = -1;
+    unsigned crt_intermed_group;
+
+    retVal = H5Pget_create_intermediate_group((hid_t) lcpl_id, &crt_intermed_group);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return crt_intermed_group ? JNI_TRUE : JNI_FALSE;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_local_heap_size_hint
+ * Signature: herr_t H5Pset_local_heap_size_hint( hid_t gcpl_id, size_t size_hint )
+ */
+JNIEXPORT void JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1local_1heap_1size_1hint
+  (JNIEnv *env, jclass clss, jint gcpl_id, jint size_hint)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_local_heap_size_hint((hid_t) gcpl_id, size_hint);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_local_heap_size_hint
+ * Signature: herr_t H5Pget_local_heap_size_hint( hid_t gcpl_id, size_t *size_hint )
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1local_1heap_1size_1hint
+  (JNIEnv *env, jclass clss, jint gcpl_id)
+{
+    herr_t retVal = -1;
+    size_t size_hint;
+
+    retVal = H5Pget_local_heap_size_hint((hid_t) gcpl_id, &size_hint);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint) size_hint;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_link_phase_change
+ * Signature: herr_t H5Pset_link_phase_change( hid_t gcpl_id, unsigned max_compact, unsigned min_dense )
+ */
+JNIEXPORT void JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1link_1phase_1change
+  (JNIEnv *env, jclass clss, jint gcpl_id, jint max_compact, jint min_dense)
+{
+    herr_t retVal = -1;
+
+    retVal = H5Pset_link_phase_change((hid_t) gcpl_id, max_compact, min_dense);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_link_phase_change
+ * Signature: herr_t H5Pget_link_phase_change( hid_t gcpl_id, unsigned *max_compact, unsigned *min_dense ) 
+ */
+JNIEXPORT jintArray JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1link_1phase_1change
+  (JNIEnv *env, jclass clss, jint gcpl_id)
+{
+    herr_t retVal = -1;
+    jintArray iarray;
+    jint max_compact_min_dense[2];
+
+    retVal = H5Pget_link_phase_change((hid_t) gcpl_id, (unsigned*) max_compact_min_dense, 
+                                      (unsigned*) (max_compact_min_dense + 1));
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    if (max_compact_min_dense[0] < 0 || max_compact_min_dense[1] < 0)
+    {
+        h5raiseException( env, "java/lang/RuntimeException", 
+                          "H5Pget_link_phase_change:  parameter overflow");
+        return NULL;
+    }
+
+#ifdef __cplusplus
+    iarray = env->NewIntArray(2);
+#else
+    iarray = (*env)->NewIntArray(env,2);
+#endif
+    if (iarray == NULL) {
+        h5outOfMemory( env,  "H5Pget_link_phase_change" );
+        return NULL;
+    }
+#ifdef __cplusplus
+    env->SetIntArrayRegion(iarray,0,2,max_compact_min_dense);
+#else
+    (*env)->SetIntArrayRegion(env,iarray,0,2,max_compact_min_dense);
+#endif
+    
+    return iarray;
+}
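+
+/*
+ * Note on the pair returned above: max_compact is the number of links a
+ * group may hold before its link storage migrates from compact (in the
+ * object header) to dense (fractal heap plus name index), and min_dense is
+ * the count below which dense storage reverts to compact. JNI has no output
+ * parameters, so the two unsigned values travel back to Java packed into
+ * one jint[2]. A minimal caller-side sketch, assuming a gcpl supplied by
+ * the caller:
+ *
+ *   H5Pset_link_phase_change(gcpl, 16, 6);  -- dense above 16 links, compact again below 6
+ */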
+
+H5T_conv_ret_t abort_on_overflow_cb(H5T_conv_except_t except_type, hid_t src_id, hid_t dst_id, void *src_buf, void *dst_buf, void *op_data)
+{
+    if (except_type == H5T_CONV_EXCEPT_RANGE_HI || except_type == H5T_CONV_EXCEPT_RANGE_LOW)
+    {
+        return H5T_CONV_ABORT;
+    }
+    return H5T_CONV_UNHANDLED;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcreate_xfer_abort_overflow
+ * Signature: hid_t H5Pcreate_xfer_abort_overflow() 
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pcreate_1xfer_1abort_1overflow
+  (JNIEnv *env, jclass clss)
+{
+    hid_t plist;
+    herr_t status;
+    
+    plist = H5Pcreate(H5P_DATASET_XFER);
+    if (plist < 0)
+    {
+        return plist;
+    }
+    status = H5Pset_type_conv_cb(plist, (H5T_conv_except_func_t) abort_on_overflow_cb, NULL);
+    if (status < 0)
+    {
+        return status;
+    }
+    return plist;
+}
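+
+/*
+ * Usage sketch (illustrative only, caller-side C): reading into a narrower
+ * native type through the property list created above makes H5Dread fail
+ * instead of silently clamping out-of-range values; dset and buf are
+ * assumed to exist.
+ *
+ *   hid_t xfer = H5Pcreate(H5P_DATASET_XFER);
+ *   H5Pset_type_conv_cb(xfer, abort_on_overflow_cb, NULL);
+ *   if (H5Dread(dset, H5T_NATIVE_SCHAR, H5S_ALL, H5S_ALL, xfer, buf) < 0) {
+ *       -- at least one source value was out of range for signed char
+ *   }
+ *   H5Pclose(xfer);
+ */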
+
+H5T_conv_ret_t abort_cb(H5T_conv_except_t except_type, hid_t src_id, hid_t dst_id, void *src_buf, void *dst_buf, void *op_data)
+{
+    return H5T_CONV_ABORT;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pcreate_xfer_abort
+ * Signature: hid_t H5Pcreate_xfer_abort() 
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pcreate_1xfer_1abort
+  (JNIEnv *env, jclass clss)
+{
+    hid_t plist;
+    herr_t status;
+    
+    plist = H5Pcreate(H5P_DATASET_XFER);
+    if (plist < 0)
+    {
+        return plist;
+    }
+    status = H5Pset_type_conv_cb(plist, (H5T_conv_except_func_t) abort_cb, NULL);
+    if (status < 0)
+    {
+        return status;
+    }
+    return plist;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pget_char_encoding(hid_t cpl_id)
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pget_1char_1encoding
+  (JNIEnv *env, jclass clss, jint cpl_id)
+{
+    H5T_cset_t encoding;
+    herr_t retVal;
+    retVal = H5Pget_char_encoding((hid_t)cpl_id, &encoding);
+    if (retVal >= 0) {
+        return (jint) encoding;
+    } else {
+        h5libraryError(env);
+        return retVal;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Pset_char_encoding(hid_t cpl_id)
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Pset_1char_1encoding
+  (JNIEnv *env, jclass clss, jint cpl_id, jint encoding)
+{
+    herr_t retVal;
+    retVal = H5Pset_char_encoding((hid_t)cpl_id, (H5T_cset_t)encoding);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return retVal;
+}
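+
+/*
+ * The encoding argument maps directly onto H5T_cset_t, so callers pass the
+ * library constants H5T_CSET_ASCII (0) or H5T_CSET_UTF8 (1). A minimal
+ * sketch for UTF-8 attribute names, assuming acpl is later handed to
+ * H5Acreate2:
+ *
+ *   hid_t acpl = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+ *   H5Pset_char_encoding(acpl, H5T_CSET_UTF8);
+ */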
diff --git a/source/c/jhdf5/h5rImpJHDF5.c b/source/c/jhdf5/h5rImpJHDF5.c
new file mode 100755
index 0000000..ca2bb62
--- /dev/null
+++ b/source/c/jhdf5/h5rImpJHDF5.c
@@ -0,0 +1,672 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Reference API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rcreate
+ * Signature: ([BILjava/lang/String;II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rcreate___3BILjava_lang_String_2II
+  (JNIEnv *env, jclass clss,
+  jbyteArray ref, jint loc_id, jstring name, jint ref_type, jint space_id)
+{
+    char* rName;
+    herr_t status;
+    jbyte *refP;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rcreate:  ref is NULL");
+        return -1;
+    }
+    if (name == NULL) {
+        h5nullArgument( env, "H5Rcreate:  name is NULL");
+        return -1;
+    }
+    if (ref_type == H5R_OBJECT) {
+#ifdef __cplusplus
+        if (env->GetArrayLength(ref) < 8) {
+            h5badArgument( env, "H5Rcreate:  ref input array < 8");
+            return -1;
+        }
+#else
+        if ((*env)->GetArrayLength(env, ref) < 8) {
+            h5badArgument( env, "H5Rcreate:  ref input array < 8");
+            return -1;
+        }
+#endif
+    } else if (ref_type == H5R_DATASET_REGION) {
+#ifdef __cplusplus
+        if (env->GetArrayLength( ref) < 12) {
+            h5badArgument( env, "H5Rcreate:  region ref input array < 12");
+            return -1;
+        }
+#else
+        if ((*env)->GetArrayLength(env, ref) < 12) {
+            h5badArgument( env, "H5Rcreate:  region ref input array < 12");
+            return -1;
+        }
+#endif
+    } else {
+        h5badArgument( env, "H5Rcreate:  ref_type unknown type ");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    refP = (jbyte *)env->GetByteArrayElements(ref,NULL);
+#else
+    refP = (jbyte *)(*env)->GetByteArrayElements(env,ref,NULL);
+#endif
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rcreate:  ref not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    rName = (char *)env->GetStringUTFChars(name,NULL);
+#else
+    rName = (char *)(*env)->GetStringUTFChars(env,name,NULL);
+#endif
+    if (rName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+        h5JNIFatalError(env,  "H5Rcreate:  name not pinned");
+        return -1;
+    }
+
+    status = H5Rcreate(refP, loc_id, rName, (H5R_type_t)ref_type, space_id);
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,rName);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,rName);
+#endif
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+        return -1;
+    }
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(ref,refP,0);
+#else
+    (*env)->ReleaseByteArrayElements(env,ref,refP,0);
+#endif
+
+    return (jint)status;
+}
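+
+/*
+ * The length checks above mirror the fixed reference sizes of HDF5 1.8: an
+ * H5R_OBJECT reference is 8 bytes (H5R_OBJ_REF_BUF_SIZE, one haddr_t) and an
+ * H5R_DATASET_REGION reference is 12 bytes (H5R_DSET_REG_REF_BUF_SIZE, a
+ * haddr_t plus a 4-byte heap index), so the Java byte[] must be at least
+ * that large for H5Rcreate to fill it in place.
+ */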
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rcreate
+ * Signature: (ILjava/lang/String;)[J
+ */
+JNIEXPORT jlongArray JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rcreate__I_3Ljava_lang_String_2
+  (JNIEnv *env, jclass clss, jint loc_id, jobjectArray names)
+{
+    char* rName;
+    herr_t status;
+    jint arrayLen;
+    jlongArray array;
+    jlong* arrayP;
+    jlong* arrayPR;
+    jstring name;
+    int i;
+
+    if (names == NULL) {
+        h5nullArgument( env, "H5Rcreate:  names is NULL");
+        return NULL;
+    }
+
+#ifdef __cplusplus
+    arrayLen = env->GetArrayLength(names);
+    array = env->NewLongArray(arrayLen);
+    if (array == NULL) {
+        return NULL;
+    }
+    arrayP = (jlong *)env->GetLongArrayElements(array,NULL);
+#else
+    arrayLen = (*env)->GetArrayLength(env,names);
+    array = (*env)->NewLongArray(env,arrayLen);
+    if (array == NULL) {
+        return NULL;
+    }
+    arrayP = (jlong *)(*env)->GetLongArrayElements(env,array,NULL);
+#endif
+    if (arrayP == NULL) {
+        h5JNIFatalError(env,  "H5Rcreate:  array not pinned");
+        return NULL;
+    }
+    
+    for (i = 0,arrayPR=arrayP; i < arrayLen; ++i,++arrayPR) { 
+    
+#ifdef __cplusplus
+        name = (jstring)env->GetObjectArrayElement(names,i);
+#else
+        name = (*env)->GetObjectArrayElement(env,names,i);
+#endif
+        if (name == NULL) {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(array,arrayP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,array,arrayP,JNI_ABORT);
+#endif
+            return NULL;
+        }
+#ifdef __cplusplus
+        rName = (char *)env->GetStringUTFChars(name,NULL);
+#else
+        rName = (char *)(*env)->GetStringUTFChars(env,name,NULL);
+#endif
+        if (rName == NULL) {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(array,arrayP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,array,arrayP,JNI_ABORT);
+#endif
+            h5JNIFatalError(env,  "H5Rcreate:  name not pinned");
+            return NULL;
+        }
+
+        status = H5Rcreate(arrayPR, loc_id, rName, H5R_OBJECT, -1);
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(name,rName);
+#else
+        (*env)->ReleaseStringUTFChars(env,name,rName);
+#endif
+        if (status < 0) {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(array,arrayP,0);
+#else
+            (*env)->ReleaseLongArrayElements(env,array,arrayP,0);
+#endif
+            h5libraryError(env);
+            return NULL;
+        }
+    
+    } /* for (i=0...)*/
+
+#ifdef __cplusplus
+    env->ReleaseLongArrayElements(array,arrayP,0);
+#else
+    (*env)->ReleaseLongArrayElements(env,array,arrayP,0);
+#endif
+
+    return array;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rdereference
+ * Signature: (IIJ)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rdereference__IJ
+  (JNIEnv *env, jclass clss, jint dataset, jlong ref)
+{
+    hid_t id;
+
+    id = H5Rdereference((hid_t)dataset, H5R_OBJECT, &ref);
+
+    if (id < 0) {
+        h5libraryError(env);
+    }
+    return (jint) id;
+}
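+
+/*
+ * The jlong overload above works because an object reference (hobj_ref_t)
+ * is exactly 8 bytes, so Java can hand it over by value and the wrapper can
+ * take its address. Region references are 12 bytes and therefore must go
+ * through the byte-array overload below.
+ */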
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rdereference
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rdereference__II_3B
+  (JNIEnv *env, jclass clss, jint dataset, jint ref_type, jbyteArray ref )
+{
+    jboolean isCopy;
+    jbyte *refP;
+    herr_t status;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rdereference:  ref is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if ((ref_type == H5R_OBJECT) && env->GetArrayLength(ref) < 8) {
+        h5badArgument( env, "H5Rdereference:  obj ref input array < 8");
+        return -1;
+    } else if ((ref_type == H5R_DATASET_REGION)
+        && env->GetArrayLength(ref) < 12) {
+        h5badArgument( env, "H5Rdereference:  region ref input array < 12");
+        return -1;
+    }
+    refP = (jbyte *)env->GetByteArrayElements(ref,&isCopy);
+#else
+    if ((ref_type == H5R_OBJECT) && (*env)->GetArrayLength(env, ref) < 8) {
+        h5badArgument( env, "H5Rdereference:  obj ref input array < 8");
+        return -1;
+    } else if ((ref_type == H5R_DATASET_REGION)
+        && (*env)->GetArrayLength(env, ref) < 12) {
+        h5badArgument( env, "H5Rdereference:  region ref input array < 12");
+        return -1;
+    }
+    refP = (jbyte *)(*env)->GetByteArrayElements(env,ref,&isCopy);
+#endif
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rderefernce:  ref not pinned");
+        return -1;
+    }
+
+    status = H5Rdereference((hid_t)dataset, (H5R_type_t)ref_type, refP);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Rget_region
+ * Signature: (II[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rget_1region
+  (JNIEnv *env, jclass clss, jint dataset, jint ref_type,
+  jbyteArray ref )
+{
+    hid_t status;
+    jboolean isCopy;
+    jbyte *refP;
+
+    if (ref_type != H5R_DATASET_REGION)  {
+        h5badArgument( env, "H5Rget_region:  bad ref_type ");
+        return -1;
+    }
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_region:  ref is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if ( env->GetArrayLength(ref) < 12) {
+        h5badArgument( env, "H5Rget_region:  region ref input array < 12");
+        return -1;
+    }
+    refP = (jbyte *)env->GetByteArrayElements(ref,&isCopy);
+#else
+    if ( (*env)->GetArrayLength(env, ref) < 12) {
+        h5badArgument( env, "H5Rget_region:  region ref input array < 12");
+        return -1;
+    }
+    refP = (jbyte *)(*env)->GetByteArrayElements(env,ref,&isCopy);
+#endif
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_region:  ref not pinned");
+        return -1;
+    }
+
+    status = H5Rget_region((hid_t)dataset, (H5R_type_t)ref_type, refP);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5G_obj_t H5Rget_obj_type(hid_t id, H5R_type_t ref_type, void *_ref)
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rget_1obj_1type
+  (JNIEnv *env, jclass clss, jint loc_id, jint ref_type, jbyteArray ref)
+{
+
+    herr_t status;
+    H5O_type_t obj_type;
+    jboolean isCopy;
+    jbyte *refP;
+
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_object_type:  ref is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    refP = (jbyte *)env->GetByteArrayElements(ref,&isCopy);
+#else
+    refP = (jbyte *)(*env)->GetByteArrayElements(env,ref,&isCopy);
+#endif
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_object_type:  ref not pinned");
+        return -1;
+    }
+
+    status = H5Rget_obj_type((hid_t)loc_id, (H5R_type_t)ref_type, refP, &obj_type);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)obj_type;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    String H5Rget_name(hid_t id, H5R_type_t ref_type, void *ref)
+ * Signature: (I[B)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rget_1name__II_3B
+  (JNIEnv *env, jclass clss, jint loc_id, jint ref_type, jbyteArray ref)
+{
+    ssize_t size;
+    jbyte *refP;
+    char *rName;
+    int rname_buf_size = 128;
+    jstring str;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_name:  ref is NULL");
+        return NULL;
+    }
+
+#ifdef __cplusplus
+    refP = (jbyte *)env->GetByteArrayElements(ref, NULL);
+#else
+    refP = (jbyte *)(*env)->GetByteArrayElements(env,ref, NULL);
+#endif
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_name:  ref not pinned");
+        return NULL;
+    }
+
+    rName = (char*) malloc(sizeof(char) * rname_buf_size);
+    if (rName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+        h5outOfMemory(env, "H5Rget_name:  malloc failed");
+        return NULL;
+    }
+
+    size = H5Rget_name((hid_t)loc_id, (H5R_type_t)ref_type, refP, rName, rname_buf_size);
+
+    if (size < 0) {
+        free(rName);
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+        return NULL;
+    }
+    if (size >= rname_buf_size) {
+        free(rName);
+        rname_buf_size = size + 1;
+        rName = (char*) malloc(sizeof(char) * rname_buf_size);
+        if (rName == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+            h5outOfMemory(env, "H5Rget_name:  malloc failed");
+            return NULL;
+        }
+        size = H5Rget_name((hid_t)loc_id, (H5R_type_t)ref_type, refP, rName, rname_buf_size);
+        if (size < 0) {
+            free(rName);
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+            h5libraryError(env);
+            return NULL;
+        }
+    }
+    rName[size] = '\0';
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(ref,refP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+
+    /* successful return -- save the string; */
+#ifdef __cplusplus
+    str = env->NewStringUTF(rName);
+#else
+    str = (*env)->NewStringUTF(env,rName);
+#endif
+    free(rName);
+    if (str == NULL) {
+        return NULL;
+    }
+    return str;
+}
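+
+/*
+ * The grow-and-retry above relies on H5Rget_name returning the full name
+ * length (excluding the terminating NUL) even when the supplied buffer is
+ * too small. An equivalent caller-side sketch of the same pattern, assuming
+ * loc and ref are valid:
+ *
+ *   char small[8];
+ *   ssize_t n = H5Rget_name(loc, H5R_OBJECT, &ref, small, sizeof small);
+ *   if (n >= (ssize_t) sizeof small) {        -- name was truncated
+ *       char *buf = (char *) malloc(n + 1);   -- n is the full length
+ *       H5Rget_name(loc, H5R_OBJECT, &ref, buf, n + 1);
+ *   }
+ */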
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    String H5Rget_name(hid_t id, jlong ref)
+ * Signature: (I[B)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rget_1name__IJ
+  (JNIEnv *env, jclass clss, jint loc_id, jlong ref)
+{
+    ssize_t size;
+    char *rName;
+    int rname_buf_size = 128;
+    jstring str;
+
+    rName = (char*) malloc(sizeof(char) * rname_buf_size);
+    if (rName == NULL) {
+        h5outOfMemory(env, "H5Rget_name:  malloc failed");
+        return NULL;
+    }
+
+    size = H5Rget_name((hid_t)loc_id, H5R_OBJECT, &ref, rName, rname_buf_size);
+
+    if (size < 0) {
+        free(rName);
+        h5libraryError(env);
+        return NULL;
+    }
+    if (size >= rname_buf_size) {
+        free(rName);
+        rname_buf_size = size + 1;
+        rName = (char*) malloc(sizeof(char) * rname_buf_size);
+        if (rName == NULL) {
+            h5outOfMemory(env, "H5Rget_name:  malloc failed");
+            return NULL;
+        }
+        size = H5Rget_name((hid_t)loc_id, H5R_OBJECT, &ref, rName, rname_buf_size);
+        if (size < 0) {
+            free(rName);
+            h5libraryError(env);
+            return NULL;
+        }
+    }
+    rName[size] = '\0';
+
+    /* successful return -- save the string; */
+#ifdef __cplusplus
+    str = env->NewStringUTF(rName);
+#else
+    str = (*env)->NewStringUTF(env,rName);
+#endif
+    free(rName);
+    if (str == NULL) {
+        return NULL;
+    }
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    String[] H5Rget_name(hid_t id, H5R_type_t ref_type, long *_ref)
+ * Signature: (I[B)Ljava/lang/String;
+ */
+JNIEXPORT jobjectArray JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Rget_1name__I_3J
+  (JNIEnv *env, jclass clss, jint loc_id, jlongArray ref)
+{
+    ssize_t size;
+    jlong *refP, *refPR;
+    char *rName;
+    int rname_buf_size = 128;
+    jclass stringClass;
+    jint arrayLen;
+    jobjectArray array;
+    jstring str;
+    int i;
+
+    if (ref == NULL) {
+        h5nullArgument( env, "H5Rget_name:  ref is NULL");
+        return NULL;
+    }
+
+#ifdef __cplusplus
+    refP = (jlong *)env->GetLongArrayElements(ref, NULL);
+#else
+    refP = (jlong *)(*env)->GetLongArrayElements(env,ref, NULL);
+#endif
+    if (refP == NULL) {
+        h5JNIFatalError(env,  "H5Rget_name:  ref not pinned");
+        return NULL;
+    }
+#ifdef __cplusplus
+    arrayLen = env->GetArrayLength(ref);
+    stringClass = env->FindClass("java/lang/String");
+    array = env->NewObjectArray(arrayLen, stringClass, NULL);
+#else
+    arrayLen = (*env)->GetArrayLength(env, ref);
+    stringClass = (*env)->FindClass(env, "java/lang/String");
+    array = (*env)->NewObjectArray(env, arrayLen, stringClass, NULL);
+#endif
+    if (array == NULL) {
+        return NULL;
+    }
+
+    rName = (char*) malloc(sizeof(char) * rname_buf_size);
+    if (rName == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(ref,refP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+        h5outOfMemory(env, "H5Rget_name:  malloc failed");
+        return NULL;
+    }
+    
+    for (i = 0,refPR = refP; i < arrayLen; ++i,++refPR) {
+
+        size = H5Rget_name((hid_t)loc_id, H5R_OBJECT, refPR, rName, rname_buf_size);
+
+        if (size < 0) {
+            free(rName);
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(ref,refP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+            h5libraryError(env);
+            return NULL;
+        }
+        if (size >= rname_buf_size) {
+            free(rName);
+            rname_buf_size = size + 1;
+            rName = (char*) malloc(sizeof(char) * rname_buf_size);
+            if (rName == NULL) {
+#ifdef __cplusplus
+                env->ReleaseLongArrayElements(ref,refP,JNI_ABORT);
+#else
+                (*env)->ReleaseLongArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+                h5outOfMemory(env, "H5Rget_name:  malloc failed");
+                return NULL;
+            }
+            size = H5Rget_name((hid_t)loc_id, H5R_OBJECT, refPR, rName, rname_buf_size);
+            if (size < 0) {
+                free(rName);
+#ifdef __cplusplus
+                env->ReleaseLongArrayElements(ref,refP,JNI_ABORT);
+#else
+                (*env)->ReleaseLongArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+                h5libraryError(env);
+                return NULL;
+            }
+        }
+        rName[size] = '\0';
+
+        /* successful return -- save the string; */
+#ifdef __cplusplus
+        str = env->NewStringUTF(rName);
+#else
+        str = (*env)->NewStringUTF(env,rName);
+#endif
+        if (str == NULL) {
+            free(rName);
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(ref,refP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+            return NULL;
+        }
+#ifdef __cplusplus
+        env->SetObjectArrayElement(array, i, str);
+#else
+        (*env)->SetObjectArrayElement(env, array, i, str);
+#endif
+
+    } /* for (i = 0...) */
+
+#ifdef __cplusplus
+    env->ReleaseLongArrayElements(ref,refP,JNI_ABORT);
+#else
+    (*env)->ReleaseLongArrayElements(env,ref,refP,JNI_ABORT);
+#endif
+    free(rName);
+
+    return array;
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5sImpJHDF5.c b/source/c/jhdf5/h5sImpJHDF5.c
new file mode 100755
index 0000000..ad37d20
--- /dev/null
+++ b/source/c/jhdf5/h5sImpJHDF5.c
@@ -0,0 +1,1314 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Dataspace Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Screate
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Screate
+  (JNIEnv *env, jclass clss, jint type)
+{
+    hid_t retVal = -1;
+    retVal =  H5Screate((H5S_class_t)type);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Screate_simple
+ * Signature: (I[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Screate_1simple
+  (JNIEnv *env, jclass clss, jint rank, jbyteArray dims, jbyteArray maxdims)
+{
+    hid_t status;
+    jbyte *dimsP, *maxdimsP;
+    jboolean isCopy;
+    hsize_t *sa;
+    hsize_t *msa;
+    int i;
+    hsize_t *lp;
+    jlong *jlp;
+
+    if (dims == NULL) {
+        h5nullArgument( env, "H5Screate_simple:  dims is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    dimsP = env->GetByteArrayElements(dims,&isCopy);
+#else
+    dimsP = (*env)->GetByteArrayElements(env,dims,&isCopy);
+#endif
+
+    if (dimsP == NULL) {
+        h5JNIFatalError(env,  "H5Screate-simple:  dims not pinned");
+        return -1;
+    }
+    sa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (sa == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+
+        h5outOfMemory(env,  "H5Screate-simple:  dims not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)dimsP;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+
+    if (maxdims == NULL) {
+        maxdimsP = NULL;
+        msa = (hsize_t *)maxdimsP;
+    } else
+    {
+#ifdef __cplusplus
+        maxdimsP = env->GetByteArrayElements(maxdims,&isCopy);
+#else
+        maxdimsP = (*env)->GetByteArrayElements(env,maxdims,&isCopy);
+#endif
+        if (maxdimsP == NULL)  {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+            h5JNIFatalError(env,  "H5Screate-simple:  maxdims not pinned");
+            return -1;
+        }
+        msa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (msa == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+            env->ReleaseByteArrayElements(maxdims,maxdimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,maxdims,maxdimsP,JNI_ABORT);
+#endif
+            free (sa);
+            h5outOfMemory(env,  "H5Screate-simple:  dims not converted to hsize_t");
+            return -1;
+        }
+        jlp = (jlong *)maxdimsP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+    }
+
+    status = H5Screate_simple(rank, (const hsize_t *)sa, (const hsize_t *)msa);
+    if (maxdimsP != NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(maxdims,maxdimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,maxdims,maxdimsP,JNI_ABORT);
+#endif
+        free (msa);
+    }
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+    free (sa);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
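+
+/*
+ * The Java side flattens its long[] dims into a byte[] before the call, so
+ * the wrapper reinterprets the pinned bytes as jlongs and widens each one
+ * into a separately allocated hsize_t array rather than casting the buffer
+ * directly: hsize_t is not guaranteed to match jlong's width on every
+ * platform. The byte array is therefore expected to hold exactly
+ * rank * sizeof(jlong) bytes.
+ */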
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Scopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Scopy
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Scopy (space_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_elements
+ * Signature: (III[J)I
+ */
+
+#ifdef notdef
+// 10/28/99 -- added code to copy the array -- this is not used,
+// but serves as a reminder in case we try to implement this in
+// the future....
+/*
+ *  Note:  the argument coord is actually long coord[][], which has been
+ *         flattened by the caller.
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sselect_1elements
+  (JNIEnv *env, jclass clss, jint space_id, jint op, jint num_elemn, jlongArray coord)
+{
+    herr_t status;
+    jint i;
+    jlong *P;
+    jboolean isCopy;
+    hssize_t *sa;
+    int rank;
+
+    if (coord == NULL) {
+        h5nullArgument( env, "H5Sselect_elements:  coord is NULL");
+        return -1;
+    }
+
+    P = (*env)->GetLongArrayElements(env,coord,&isCopy);
+    if (P == NULL) {
+        h5JNIFatalError(env,  "H5Sselect_elements:  coord not pinned");
+        return -1;
+    }
+    sa = (hssize_t *)malloc( num_elemn * 2 * sizeof(hssize_t));
+    if (sa == NULL) {
+        (*env)->ReleaseLongArrayElements(env,coord,P,JNI_ABORT);
+        h5outOfMemory(env,  "H5Sselect_elements:  coord array not converted to hssize_t");
+        return -1;
+    }
+    for (i = 0; i < (num_elemn * 2); i++) {
+        sa[i] = P[i];
+    }
+
+    status = H5Sselect_elements (space_id, (H5S_seloper_t)op, num_elemn, (const hssize_t **)&sa);
+    (*env)->ReleaseLongArrayElements(env, coord, P, JNI_ABORT);
+    free(sa);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+#endif
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sselect_1elements
+  (JNIEnv *env, jclass clss, jint space_id, jint op, jint num_elemn, jbyteArray coord)
+{
+    int ii;
+    hsize_t *lp;
+    hsize_t *llp;
+    jlong *jlp;
+    herr_t status;
+    jbyte *P;
+    jboolean isCopy;
+    jsize size;
+    int nlongs;
+
+    if (coord == NULL) {
+        h5nullArgument( env, "H5Sselect_elements:  coord is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    P = env->GetByteArrayElements(coord,&isCopy);
+#else
+    P = (*env)->GetByteArrayElements(env,coord,&isCopy);
+#endif
+    if (P == NULL) {
+        h5JNIFatalError(env,  "H5Sselect_elements:  coord not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    size = (int) env->GetArrayLength(coord);
+#else
+    size = (int) (*env)->GetArrayLength(env,coord);
+#endif
+    nlongs = size / sizeof(jlong);
+    lp = (hsize_t *) malloc(nlongs * sizeof(hsize_t));
+    if (lp == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(coord, P, JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env, coord, P, JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Sselect_elements:  coord not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)P;
+    llp = lp;
+    for (ii = 0; ii < nlongs; ii++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+
+    status = H5Sselect_elements (space_id, (H5S_seloper_t)op, num_elemn, llp);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(coord, P, JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env, coord, P, JNI_ABORT);
+#endif
+    free(llp);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
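+
+/*
+ * The coord byte array is a flattened, row-major long[num_elemn][rank]: for
+ * example, selecting the points (0,0) and (2,3) in a rank-2 dataspace means
+ * num_elemn = 2 and coord holding the jlongs {0, 0, 2, 3}. nlongs is derived
+ * from the array length, so it must equal num_elemn * rank.
+ */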
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_all
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sselect_1all
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Sselect_all(space_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_none
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sselect_1none
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Sselect_none(space_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_valid
+ * Signature: (I)B
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sselect_1valid
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    htri_t bval;
+    bval = H5Sselect_valid(space_id);
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_npoints
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1simple_1extent_1npoints
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    hssize_t retVal = -1;
+    retVal =  H5Sget_simple_extent_npoints(space_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_npoints
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1select_1npoints
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    hssize_t retVal = -1;
+    retVal =  H5Sget_select_npoints(space_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_ndims
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1simple_1extent_1ndims
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    int retVal = -1;
+    retVal =  H5Sget_simple_extent_ndims(space_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_dims
+ * Signature: (I[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1simple_1extent_1dims
+  (JNIEnv *env, jclass clss, jint space_id, jlongArray dims, jlongArray maxdims)
+{
+    int status;
+    jlong *dimsP, *maxdimsP;
+    jboolean isCopy;
+    hsize_t *sa;
+    hsize_t *msa;
+    int i;
+    int rank;
+
+    if (dims == NULL) {
+        dimsP = NULL;
+        sa = (hsize_t *)dimsP;
+    } else {
+#ifdef __cplusplus
+        dimsP = env->GetLongArrayElements(dims,&isCopy);
+#else
+        dimsP = (*env)->GetLongArrayElements(env,dims,&isCopy);
+#endif
+        if (dimsP == NULL) {
+            h5JNIFatalError(env,  "H5Pget_simple_extent:  dims not pinned");
+            return -1;
+        }
+#ifdef __cplusplus
+        rank = (int) env->GetArrayLength(dims);
+#else
+        rank = (int) (*env)->GetArrayLength(env,dims);
+#endif
+        sa = (hsize_t *)malloc( rank * sizeof(hsize_t));
+        if (sa == NULL)  {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(dims,dimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+            h5outOfMemory(env,  "H5Sget_simple_extent:  dims not converted to hsize_t");
+            return -1;
+        }
+    }
+    if (maxdims == NULL) {
+        maxdimsP = NULL;
+        msa = (hsize_t *)maxdimsP;
+    } else {
+#ifdef __cplusplus
+        maxdimsP = env->GetLongArrayElements(maxdims,&isCopy);
+#else
+        maxdimsP = (*env)->GetLongArrayElements(env,maxdims,&isCopy);
+#endif
+        if (maxdimsP == NULL) {
+            if (dimsP != NULL) {
+#ifdef __cplusplus
+                 env->ReleaseLongArrayElements(dims,dimsP,JNI_ABORT);
+#else
+                 (*env)->ReleaseLongArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+            }
+            h5JNIFatalError(env,  "H5Pget_simple_extent:  maxdims not pinned");
+            return -1;
+        }
+        if (dimsP == NULL) {
+#ifdef __cplusplus
+            rank = (int) env->GetArrayLength(maxdims);
+#else
+            rank = (int) (*env)->GetArrayLength(env,maxdims);
+#endif
+        }
+        msa = (hsize_t *)malloc( rank * sizeof(hsize_t));
+        if (msa == NULL)  {
+#ifdef __cplusplus
+            if (dimsP != NULL) {
+                 env->ReleaseLongArrayElements(dims,dimsP,JNI_ABORT);
+            }
+            env->ReleaseLongArrayElements(maxdims,maxdimsP,JNI_ABORT);
+#else
+            if (dimsP != NULL) {
+                 (*env)->ReleaseLongArrayElements(env,dims,dimsP,JNI_ABORT);
+             }
+            (*env)->ReleaseLongArrayElements(env,maxdims,maxdimsP,JNI_ABORT);
+#endif
+            if (sa != NULL) {
+                 free(sa);
+            }
+            h5outOfMemory(env,  "H5Sget_simple_extent:  maxdims not converted to hsize_t");
+            return -1;
+        }
+    }
+
+    status = H5Sget_simple_extent_dims(space_id, (hsize_t *)sa, (hsize_t *)msa);
+
+    if (status < 0) {
+        if (dimsP != NULL) {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(dims,dimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+            free(sa);
+        }
+        if (maxdimsP != NULL)  {
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(maxdims,maxdimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseLongArrayElements(env,maxdims,maxdimsP,JNI_ABORT);
+#endif
+            free(msa);
+        }
+        h5libraryError(env);
+    } else {
+        if (dimsP != NULL) {
+            for (i = 0; i < rank; i++) {
+                dimsP[i] = sa[i];
+            }
+            free(sa);
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(dims,dimsP,0);
+#else
+            (*env)->ReleaseLongArrayElements(env,dims,dimsP,0);
+#endif
+        }
+        if (maxdimsP != NULL) {
+            for (i = 0; i < rank; i++) {
+                maxdimsP[i] = msa[i];
+            }
+            free(msa);
+#ifdef __cplusplus
+            env->ReleaseLongArrayElements(maxdims,maxdimsP,0);
+#else
+            (*env)->ReleaseLongArrayElements(env,maxdims,maxdimsP,0);
+#endif
+        }
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_simple_extent_type
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1simple_1extent_1type
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    H5S_class_t retVal = H5S_NO_CLASS;
+    retVal =  H5Sget_simple_extent_type(space_id);
+    if (retVal == H5S_NO_CLASS) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sset_extent_simple
+ * Signature: (II[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sset_1extent_1simple
+  (JNIEnv *env, jclass clss, jint space_id, jint rank, jbyteArray dims, jbyteArray maxdims)
+{
+    herr_t status;
+    jbyte *dimsP, *maxdimsP;
+    jboolean isCopy;
+    hsize_t *sa;
+    hsize_t *msa;
+    int i;
+    hsize_t *lp;
+    jlong *jlp;
+
+    if (dims == NULL) {
+        h5nullArgument( env, "H5Sset_simple_extent:  dims is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    dimsP = env->GetByteArrayElements(dims,&isCopy);
+#else
+    dimsP = (*env)->GetByteArrayElements(env,dims,&isCopy);
+#endif
+    if (dimsP == NULL) {
+        h5JNIFatalError(env,  "H5Pset_simple_extent:  dims not pinned");
+        return -1;
+    }
+    sa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (sa == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Sset_simple_extent:  dims not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)dimsP;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+    if (maxdims == NULL) {
+        maxdimsP = NULL;
+        msa = (hsize_t *)maxdimsP;
+    } else {
+#ifdef __cplusplus
+        maxdimsP = env->GetByteArrayElements(maxdims,&isCopy);
+#else
+        maxdimsP = (*env)->GetByteArrayElements(env,maxdims,&isCopy);
+#endif
+        if (maxdimsP == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+            h5JNIFatalError(env,  "H5Pset_simple_extent:  maxdims not pinned");
+            return -1;
+        }
+        msa = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (msa == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+            env->ReleaseByteArrayElements(maxdims,maxdimsP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,maxdims,maxdimsP,JNI_ABORT);
+#endif
+            free (sa);
+            h5outOfMemory(env,  "H5Sset_simple_extent:  maxdims not converted to hsize_t");
+            return -1;
+        }
+        jlp = (jlong *)maxdimsP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+    }
+
+    status = H5Sset_extent_simple(space_id, rank, (hsize_t *)sa, (hsize_t *)msa);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(dims,dimsP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+    free (sa);
+    if (maxdimsP != NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(maxdims,maxdimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,maxdims,maxdimsP,JNI_ABORT);
+#endif
+        free (msa);
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sis_simple
+ * Signature: (I)J
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sis_1simple
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    htri_t bval;
+    bval = H5Sis_simple(space_id);
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Soffset_simple
+ * Signature: (I[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Soffset_1simple
+  (JNIEnv *env, jclass clss, jint space_id, jbyteArray offset)
+{
+    herr_t status;
+    jbyte *P = NULL;
+    jboolean isCopy;
+    hssize_t *sa;
+    int rank;
+    int i;
+    hssize_t *lp;
+    jlong *jlp;
+
+    if (offset != NULL) {
+#ifdef __cplusplus
+        P = env->GetByteArrayElements(offset,&isCopy);
+#else
+        P = (*env)->GetByteArrayElements(env,offset,&isCopy);
+#endif
+        if (P == NULL) {
+            h5JNIFatalError(env,  "H5Soffset_simple:  offset not pinned");
+            return -1;
+        }
+#ifdef __cplusplus
+        i = (int) env->GetArrayLength(offset);
+#else
+        i = (int) (*env)->GetArrayLength(env,offset);
+#endif
+        rank = i / sizeof(jlong);
+        sa = lp = (hssize_t *)malloc(rank * sizeof(hssize_t));
+        if (sa == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(offset,P,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,offset,P,JNI_ABORT);
+#endif
+            h5outOfMemory(env,  "H5Soffset_simple:  offset not converted to hssize_t");
+            return -1;
+        }
+        jlp = (jlong *)P;
+        for (i = 0; i < rank; i++) {
+            *lp = (hssize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+    } else {
+        P = NULL;
+        sa = (hssize_t *)P;
+    }
+
+    status = H5Soffset_simple(space_id, sa);
+    if (P != NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(offset,P,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,offset,P,JNI_ABORT);
+#endif
+        free(sa);
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sextent_copy
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sextent_1copy
+  (JNIEnv *env, jclass clss, jint space_id, jint src_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Sextent_copy(space_id, src_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sset_extent_none
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sset_1extent_1none
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Sset_extent_none(space_id);
+    if (retVal < 0) {
+        /* throw exception */
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sselect_hyperslab
+ * Signature: (II[B[B[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sselect_1hyperslab
+  (JNIEnv *env, jclass clss, jint space_id, jint op,
+  jbyteArray start, jbyteArray stride, jbyteArray count, jbyteArray block)
+{
+    herr_t status;
+    jbyte *startP, *strideP, *countP, *blockP;
+    jboolean isCopy;
+    hsize_t *strt;
+    hsize_t *strd;
+    hsize_t *cnt;
+    hsize_t *blk;
+    int rank;
+    int i;
+    hsize_t *lp;
+    jlong *jlp;
+
+    if (start == NULL) {
+        h5nullArgument( env, "H5Sselect_hyperslab:  start is NULL");
+        return -1;
+    }
+    if (count == NULL) {
+        h5nullArgument( env, "H5Sselect_hyperslab:  count is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    i = (int)env->GetArrayLength(start);
+    if (i != env->GetArrayLength(count)) {
+        h5badArgument( env, "H5Sselect_hyperslab:  count and start have different rank!");
+        return -1;
+    }
+#else
+    i = (int)(*env)->GetArrayLength(env,start);
+    if (i != (*env)->GetArrayLength(env,count)) {
+        h5badArgument( env, "H5Sselect_hyperslab:  count and start have different rank!");
+        return -1;
+    }
+#endif
+    rank = i / sizeof(jlong);
+
+#ifdef __cplusplus
+    startP = env->GetByteArrayElements(start,&isCopy);
+#else
+    startP = (*env)->GetByteArrayElements(env,start,&isCopy);
+#endif
+    if (startP == NULL) {
+        h5JNIFatalError(env,  "H5Sselect_hyperslab:  start not pinned");
+        return -1;
+    }
+    strt = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (strt == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(start,startP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,start,startP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Sselect_hyperslab:  start not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)startP;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+#ifdef __cplusplus
+    countP = env->GetByteArrayElements(count,&isCopy);
+#else
+    countP = (*env)->GetByteArrayElements(env,count,&isCopy);
+#endif
+    if (countP == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+#endif
+        free(strt);
+        h5JNIFatalError(env,  "H5Sselect_hyperslab:  count not pinned");
+        return -1;
+    }
+    cnt = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+    if (cnt == NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+        env->ReleaseByteArrayElements(count, countP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+        (*env)->ReleaseByteArrayElements(env,count, countP,JNI_ABORT);
+#endif
+        free(strt);
+        h5outOfMemory(env,  "H5Sselect_hyperslab:  count not converted to hsize_t");
+        return -1;
+    }
+    jlp = (jlong *)countP;
+    for (i = 0; i < rank; i++) {
+        *lp = (hsize_t)*jlp;
+        lp++;
+        jlp++;
+    }
+    if (stride == NULL) {
+        strideP = NULL;
+        strd = (hsize_t *)strideP;
+    } else {
+#ifdef __cplusplus
+        strideP = env->GetByteArrayElements(stride,&isCopy);
+#else
+        strideP = (*env)->GetByteArrayElements(env,stride,&isCopy);
+#endif
+        if (strideP == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(count, countP,JNI_ABORT);
+            env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,count, countP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+#endif
+            free(cnt); free(strt);
+            h5badArgument( env, "H5Sselect_hyperslab:  stride not pinned");
+            return -1;
+        }
+        strd = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (strd == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(count, countP,JNI_ABORT);
+            env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+            env->ReleaseByteArrayElements(stride, strideP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,count, countP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,stride, strideP,JNI_ABORT);
+#endif
+            free(cnt); free(strt);
+            h5outOfMemory(env,  "H5Sselect_hyperslab:  stride not converted to hsize_t");
+            return -1;
+        }
+        jlp = (jlong *)strideP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+    }
+    if (block == NULL) {
+        blockP = NULL;
+        blk = (hsize_t *)blockP;
+    } else {
+#ifdef __cplusplus
+        blockP = env->GetByteArrayElements(block,&isCopy);
+#else
+        blockP = (*env)->GetByteArrayElements(env,block,&isCopy);
+#endif
+        if (blockP == NULL)  {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(stride, strideP,JNI_ABORT);
+            env->ReleaseByteArrayElements(count, countP,JNI_ABORT);
+            env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,stride, strideP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,count, countP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+#endif
+            free(cnt); free(strt);
+            if (strd != NULL) { free(strd); }
+            h5JNIFatalError(env,  "H5Sselect_hyperslab:  block not pinned");
+            return -1;
+        }
+        blk = lp = (hsize_t *)malloc(rank * sizeof(hsize_t));
+        if (blk == NULL) {
+#ifdef __cplusplus
+            env->ReleaseByteArrayElements(stride, strideP,JNI_ABORT);
+            env->ReleaseByteArrayElements(count, countP,JNI_ABORT);
+            env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+            env->ReleaseByteArrayElements(block, blockP,JNI_ABORT);
+#else
+            (*env)->ReleaseByteArrayElements(env,stride, strideP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,count, countP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+            (*env)->ReleaseByteArrayElements(env,block, blockP,JNI_ABORT);
+#endif
+            free(cnt); free(strt);
+            if (strd != NULL) { free(strd); }
+            h5outOfMemory(env,  "H5Sget_simple_extent:  block not converted to hsize_t");
+            return -1;
+        }
+        jlp = (jlong *)blockP;
+        for (i = 0; i < rank; i++) {
+            *lp = (hsize_t)*jlp;
+            lp++;
+            jlp++;
+        }
+    }
+
+    status = H5Sselect_hyperslab (space_id, (H5S_seloper_t)op, (const hsize_t *)strt, (const hsize_t *)strd, (const hsize_t *)cnt, (const hsize_t *)blk);
+
+#ifdef __cplusplus
+    env->ReleaseByteArrayElements(start, startP,JNI_ABORT);
+    env->ReleaseByteArrayElements(count, countP,JNI_ABORT);
+#else
+    (*env)->ReleaseByteArrayElements(env,start, startP,JNI_ABORT);
+    (*env)->ReleaseByteArrayElements(env,count, countP,JNI_ABORT);
+#endif
+    free(strt);
+    free(cnt);
+    if (strideP != NULL) {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(stride, strideP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,stride, strideP,JNI_ABORT);
+#endif
+        free(strd);
+    }
+    if (blockP != NULL)  {
+#ifdef __cplusplus
+        env->ReleaseByteArrayElements(block, blockP,JNI_ABORT);
+#else
+        (*env)->ReleaseByteArrayElements(env,block, blockP,JNI_ABORT);
+#endif
+        free(blk);
+    }
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sclose
+  (JNIEnv *env, jclass clss, jint space_id)
+{
+    herr_t retVal = 0;
+
+    if (space_id > 0)
+        retVal =  H5Sclose(space_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_hyper_nblocks
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1select_1hyper_1nblocks
+  (JNIEnv *env, jclass clss, jint spaceid)
+{
+    hssize_t status;
+
+    status = H5Sget_select_hyper_nblocks((hid_t)spaceid);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jlong)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_elem_npoints
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1select_1elem_1npoints
+  (JNIEnv *env, jclass clss, jint spaceid)
+{
+    hssize_t status;
+
+    status = H5Sget_select_elem_npoints((hid_t)spaceid);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jlong)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_hyper_blocklist
+ * Signature: (IJJ[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1select_1hyper_1blocklist
+  (JNIEnv *env, jclass clss, jint spaceid, jlong startblock, jlong numblocks, jlongArray buf)
+{
+    herr_t status;
+    jlong *bufP;
+    jboolean isCopy;
+    hsize_t *ba;
+    int i;
+    long st;
+    long nb;
+
+    st = (long)startblock;
+    nb = (long)numblocks;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Sget_select_hyper_blocklist:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    bufP = env->GetLongArrayElements(buf,&isCopy);
+#else
+    bufP = (*env)->GetLongArrayElements(env,buf,&isCopy);
+#endif
+    if (bufP == NULL) {
+        h5JNIFatalError( env, "H5Sget_select_hyper_blocklist:  buf not pinned");
+        return -1;
+    }
+    ba = (hsize_t *)malloc( nb * 2 * sizeof(hsize_t));
+    if (ba == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf, bufP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,bufP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Screate-simple:  buffer not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Sget_select_hyper_blocklist((hid_t)spaceid, (hsize_t)st,
+        (hsize_t)nb, (hsize_t *)ba);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,bufP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,bufP,JNI_ABORT);
+#endif
+        free (ba);
+        h5libraryError(env);
+    } else  {
+        for (i = 0; i < (numblocks*2); i++) {
+            bufP[i] = ba[i];
+        }
+        free (ba);
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,bufP,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,bufP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_elem_pointlist
+ * Signature: (IJJ[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1select_1elem_1pointlist
+  (JNIEnv *env, jclass clss, jint spaceid, jlong startpoint, jlong numpoints, jlongArray buf)
+{
+    herr_t status;
+    jlong *bufP;
+    jboolean isCopy;
+    hsize_t *ba;
+    int i;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "H5Sget_select_elem_pointlist:  buf is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    bufP = env->GetLongArrayElements(buf,&isCopy);
+#else
+    bufP = (*env)->GetLongArrayElements(env,buf,&isCopy);
+#endif
+    if (bufP == NULL) {
+        h5JNIFatalError( env, "H5Sget_select_elem_pointlist:  buf not pinned");
+        return -1;
+    }
+    ba = (hsize_t *)malloc( ((long)numpoints) * sizeof(hsize_t));
+    if (ba == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,bufP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,bufP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Sget_select_elem_pointlist:  buf not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Sget_select_elem_pointlist((hid_t)spaceid, (hsize_t)startpoint,
+        (hsize_t)numpoints, (hsize_t *)ba);
+
+    if (status < 0) {
+        free (ba);
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,bufP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,bufP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+        for (i = 0; i < numpoints; i++) {
+            bufP[i] = ba[i];
+        }
+        free (ba) ;
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(buf,bufP,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,buf,bufP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Sget_select_bounds
+ * Signature: (I[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Sget_1select_1bounds
+  (JNIEnv *env, jclass clss, jint spaceid, jlongArray start, jlongArray end)
+{
+    herr_t status;
+    jlong *startP, *endP;
+    jboolean isCopy;
+    hsize_t *strt;
+    hsize_t *en;
+    int rank;
+    int i;
+
+    if ( start == NULL ) {
+        h5nullArgument( env, "H5Sget_select_bounds:  start is NULL");
+        return -1;
+    }
+
+    if ( end == NULL ) {
+        h5nullArgument( env, "H5Sget_select_bounds:  end is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    startP = env->GetLongArrayElements(start,&isCopy);
+#else
+    startP = (*env)->GetLongArrayElements(env,start,&isCopy);
+#endif
+    if (startP == NULL) {
+        h5JNIFatalError( env, "H5Sget_select_bounds:  start not pinned");
+        return -1;
+    }
+#ifdef __cplusplus
+    rank = (int)env->GetArrayLength(start);
+#else
+    rank = (int)(*env)->GetArrayLength(env,start);
+#endif
+    strt = (hsize_t *)malloc( rank * sizeof(hsize_t));
+    if (strt == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(start,startP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,start,startP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Sget_select_bounds:  start not converted to hsize_t");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    endP = env->GetLongArrayElements(end,&isCopy);
+#else
+    endP = (*env)->GetLongArrayElements(env,end,&isCopy);
+#endif
+    if (endP == NULL) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(start,startP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,start,startP,JNI_ABORT);
+#endif
+        free(strt);
+        h5JNIFatalError( env, "H5Sget_select_bounds:  end not pinned");
+        return -1;
+    }
+    en = (hsize_t *)malloc( rank * sizeof(hsize_t));
+    if (en == NULL)  {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(end,endP,JNI_ABORT);
+        env->ReleaseLongArrayElements(start,startP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,end,endP,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,start,startP,JNI_ABORT);
+#endif
+        free(strt);
+        h5outOfMemory(env,  "H5Sget_simple_extent:  dims not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Sget_select_bounds((hid_t) spaceid, (hsize_t *)strt, (hsize_t *)en);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(start,startP,JNI_ABORT);
+        env->ReleaseLongArrayElements(end,endP,JNI_ABORT);
+#else
+        (*env)->ReleaseLongArrayElements(env,start,startP,JNI_ABORT);
+        (*env)->ReleaseLongArrayElements(env,end,endP,JNI_ABORT);
+#endif
+        free(strt); free(en);
+        h5libraryError(env);
+    } else  {
+        for (i = 0; i < rank; i++) {
+            startP[i] = strt[i];
+            endP[i] = en[i];
+        }
+        free(strt); free(en);
+#ifdef __cplusplus
+        env->ReleaseLongArrayElements(start,startP,0);
+        env->ReleaseLongArrayElements(end,endP,0);
+#else
+        (*env)->ReleaseLongArrayElements(env,start,startP,0);
+        (*env)->ReleaseLongArrayElements(env,end,endP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5tImpJHDF5.c b/source/c/jhdf5/h5tImpJHDF5.c
new file mode 100755
index 0000000..50543be
--- /dev/null
+++ b/source/c/jhdf5/h5tImpJHDF5.c
@@ -0,0 +1,1631 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Datatype Object API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
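+
+/*
+ * Illustrative sketch (not part of the upstream source): the wrapping
+ * pattern described above, shown for a hypothetical HDF5 entry point
+ * H5Xexample(hid_t).  Each wrapper below follows the same shape:
+ * validate the arguments, call the one HDF5 entry point, raise a Java
+ * exception via h5libraryError on a negative return code, and hand the
+ * status back to Java as a jint.  Kept inside #if 0 so it is never
+ * compiled.
+ */
+#if 0
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Xexample
+  (JNIEnv *env, jclass clss, jint obj_id)
+{
+    herr_t status = H5Xexample((hid_t)obj_id);  /* hypothetical entry point */
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+#endif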
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+extern jboolean h5raiseException( JNIEnv *env, char *exception, char *message);
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Topen
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Topen
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint access_plist_id)
+{
+    herr_t status;
+    char* tname;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Topen:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    tname = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    tname = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Topen:  name not pinned");
+        return -1;
+    }
+
+    status = H5Topen((hid_t)loc_id, tname, (hid_t) access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,tname);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,tname);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommit
+ * Signature: (ILjava/lang/String;IIII)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tcommit
+  (JNIEnv *env, jclass clss, jint loc_id, jstring name, jint type_id, 
+  jint link_create_plist_id, jint dtype_create_plist_id, 
+  jint dtype_access_plist_id)
+{
+    herr_t status;
+    char* tname;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tcommit:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    tname = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    tname = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tcommit:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tcommit((hid_t)loc_id, tname, (hid_t)type_id, 
+    	(hid_t)link_create_plist_id, (hid_t)dtype_create_plist_id,
+    	(hid_t)dtype_access_plist_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,tname);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,tname);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcommitted
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tcommitted
+  (JNIEnv *env, jclass clss, jint type)
+{
+    htri_t bval;
+    bval = H5Tcommitted(type);
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        /* raise exception -- return value is irrelevant */
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcreate
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tcreate
+  (JNIEnv *env, jclass clss, jint dclass, jint size)
+{
+    hid_t retVal = -1;
+    retVal =  H5Tcreate((H5T_class_t )dclass, size );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tcopy
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tcopy
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    hid_t retVal = -1;
+    retVal =  H5Tcopy(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tequal
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tequal
+  (JNIEnv *env, jclass clss, jint type_id1, jint type_id2)
+{
+    htri_t bval;
+    bval = H5Tequal(type_id1, type_id2 );
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        /* raise exception -- return value is irrelevant */
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tlock
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tlock
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tlock(type_id );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_class
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1class
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_class_t retVal = H5T_NO_CLASS;
+    retVal =  H5Tget_class(type_id );
+    if (retVal == H5T_NO_CLASS) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_size
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1size
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal;
+    jint size;
+    retVal =  H5Tget_size(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    size = retVal;
+    if (size != retVal) {
+        h5raiseException( env,
+          "ncsa/hdf/hdf5lib/exceptions/HDF5LibraryException",
+          "H5Tget_size() overflows jint");
+    }
+    return size;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_size_long
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1size_1long
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal;
+    retVal =  H5Tget_size(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_size
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1size
+  (JNIEnv *env, jclass clss, jint type_id, jint size)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_size(type_id, size );
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_order
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1order
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_order_t retVal = H5T_ORDER_ERROR;
+    retVal =  H5Tget_order(type_id );
+    if (retVal == H5T_ORDER_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_order
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1order
+  (JNIEnv *env, jclass clss, jint type_id, jint order)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_order(type_id, (H5T_order_t)order);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_precision
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1precision
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_precision(type_id);
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_precision
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1precision
+  (JNIEnv *env, jclass clss, jint type_id, jint precision)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_precision(type_id, precision);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_offset
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1offset
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_offset(type_id);
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_offset
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1offset
+  (JNIEnv *env, jclass clss, jint type_id, jint offset)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_offset(type_id, offset);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_pad
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1pad
+  (JNIEnv *env, jclass clss, jint type_id, jintArray pad)
+{
+    herr_t status;
+    jboolean isCopy;
+    jint *P;
+
+    if (pad == NULL) {
+        h5nullArgument( env, "H5Tget_pad:  pad is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    P = env->GetIntArrayElements(pad,&isCopy);
+#else
+    P = (*env)->GetIntArrayElements(env,pad,&isCopy);
+#endif
+    if (P == NULL) {
+        h5JNIFatalError(env,  "H5Tget_pad:  pad not pinned");
+        return -1;
+    }
+    status = H5Tget_pad(type_id, (H5T_pad_t *)&(P[0]), (H5T_pad_t *)&(P[1]));
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(pad,P,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,pad,P,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(pad,P,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,pad,P,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_pad
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1pad
+  (JNIEnv *env, jclass clss, jint type_id, jint lsb, jint msb)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_pad(type_id, (H5T_pad_t)lsb, (H5T_pad_t)msb);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_sign
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1sign
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_sign_t retVal = H5T_SGN_ERROR;
+    retVal =  H5Tget_sign(type_id);
+    if (retVal == H5T_SGN_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_sign
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1sign
+  (JNIEnv *env, jclass clss, jint type_id, jint sign)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_sign(type_id, (H5T_sign_t)sign);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_fields
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1fields
+  (JNIEnv *env, jclass clss, jint type_id, jintArray fields)
+{
+    herr_t status;
+    jboolean isCopy;
+    jint *P;
+    size_t spos = 0, epos = 0, esize = 0, mpos = 0, msize = 0;
+
+    if (fields == NULL) {
+        h5nullArgument( env, "H5Tget_fields:  fields is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    if (env->GetArrayLength(fields) < 5) {
+        h5badArgument( env, "H5Tget_fields:  fields input array < order 5");
+        return -1;
+    }
+    P = env->GetIntArrayElements(fields,&isCopy);
+#else
+    if ((*env)->GetArrayLength(env, fields) < 5) {
+        h5badArgument( env, "H5Tget_fields:  fields input array < order 5");
+        return -1;
+    }
+    P = (*env)->GetIntArrayElements(env,fields,&isCopy);
+#endif
+    if (P == NULL) {
+        h5JNIFatalError(env,  "H5Tget_fields:  fields not pinned");
+        return -1;
+    }
+
+    /* use size_t temporaries: casting the jint array to size_t * writes past
+       each element on platforms where sizeof(size_t) != sizeof(jint) */
+    status = H5Tget_fields(type_id, &spos, &epos, &esize, &mpos, &msize);
+    P[0] = (jint)spos;
+    P[1] = (jint)epos;
+    P[2] = (jint)esize;
+    P[3] = (jint)mpos;
+    P[4] = (jint)msize;
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(fields,P,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,fields,P,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else  {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(fields,P,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,fields,P,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_fields
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1fields
+  (JNIEnv *env, jclass clss, jint type_id, jint spos, jint epos,
+  jint esize, jint mpos, jint msiz)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_fields(type_id, spos, epos, esize, mpos, msiz);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_ebias
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1ebias
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_ebias(type_id );
+    if (retVal == 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_ebias
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1ebias
+  (JNIEnv *env, jclass clss, jint type_id, jint ebias)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_ebias(type_id, ebias);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_norm
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1norm
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_norm_t retVal = H5T_NORM_ERROR;
+    retVal =  H5Tget_norm(type_id);
+    if (retVal == H5T_NORM_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_norm
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1norm
+  (JNIEnv *env, jclass clss, jint type_id, jint norm)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_norm(type_id, (H5T_norm_t )norm);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_inpad
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1inpad
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_pad_t retVal = H5T_PAD_ERROR;
+    retVal =  H5Tget_inpad(type_id );
+    if (retVal == H5T_PAD_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_inpad
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1inpad
+  (JNIEnv *env, jclass clss, jint type_id, jint inpad)
+{
+    herr_t retVal = -1;
+    retVal = H5Tset_inpad(type_id, (H5T_pad_t) inpad);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_cset
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1cset
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_cset_t retVal = H5T_CSET_ERROR;
+    retVal =  H5Tget_cset(type_id);
+    if (retVal == H5T_CSET_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_cset
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1cset
+  (JNIEnv *env, jclass clss, jint type_id, jint cset)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_cset(type_id, (H5T_cset_t)cset);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_strpad
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1strpad
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    H5T_str_t retVal = H5T_STR_ERROR;
+    retVal =  H5Tget_strpad(type_id);
+    if (retVal == H5T_STR_ERROR) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_strpad
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1strpad
+  (JNIEnv *env, jclass clss, jint type_id, jint strpad)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tset_strpad(type_id, (H5T_str_t)strpad);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_nmembers
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1nmembers
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    int retVal = -1;
+    retVal =  H5Tget_nmembers(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_name
+ * Signature: (II)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1member_1name
+  (JNIEnv *env, jclass clss, jint type_id, jint field_idx)
+{
+    char *name;
+    jstring str;
+
+    name = H5Tget_member_name(type_id, (unsigned) field_idx);
+
+    if (name == NULL) {
+        return NULL;
+    } else {
+        /* may throw OutOfMemoryError */
+#ifdef __cplusplus
+        str = env->NewStringUTF(name);
+#else
+        str = (*env)->NewStringUTF(env,name);
+#endif
+        if (str == NULL)  {
+            free(name);
+            h5outOfMemory(env,  "H5Tget_member_name:  returned string not created");
+            return NULL;
+        }
+        free(name);
+        return str;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_index
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1member_1index
+  (JNIEnv *env, jclass clss, jint type_id, jstring field_name)
+{
+    char *tname;
+    int index;
+    jboolean isCopy;
+
+    if (field_name == NULL) {
+        h5nullArgument( env, "H5Tget_member_index:  field_name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    tname = (char *)env->GetStringUTFChars(field_name,&isCopy);
+#else
+    tname = (char *)(*env)->GetStringUTFChars(env,field_name,&isCopy);
+#endif
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tget_member_index:  field_name not pinned");
+        return -1;
+    }
+
+    index = H5Tget_member_index(type_id, tname);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(field_name,tname);
+#else
+    (*env)->ReleaseStringUTFChars(env,field_name,tname);
+#endif
+
+    return index;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_type
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1member_1type
+  (JNIEnv *env, jclass clss, jint type_id, jint field_idx)
+{
+    hid_t retVal = -1;
+    retVal =  H5Tget_member_type(type_id, field_idx);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_offset
+ * Signature: (II)J
+ */
+JNIEXPORT jlong JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1member_1offset
+  (JNIEnv *env, jclass clss, jint type_id, jint memno)
+{
+    size_t retVal = 0;
+    retVal =  H5Tget_member_offset((hid_t)type_id, memno);
+    return (jlong)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tinsert
+ * Signature: (ILjava/lang/String;JI)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tinsert
+  (JNIEnv *env, jclass clss, jint type_id, jstring name, jlong offset, jint field_id)
+{
+    herr_t status;
+    char* tname;
+    jboolean isCopy;
+    long off;
+
+    off = (long)offset;
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tinsert:  name is NULL");
+        return -1;
+    }
+#ifdef __cplusplus
+    tname =(char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    tname =(char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (tname == NULL) {
+        h5JNIFatalError(env,  "H5Tinsert:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tinsert(type_id, tname, (size_t)off, field_id);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,tname);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,tname);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+    return (jint)status;
+
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tpack
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tpack
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    herr_t retVal = -1;
+    retVal =  H5Tpack(type_id);
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tclose
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tclose
+  (JNIEnv *env, jclass clss, jint type_id)
+{
+    herr_t retVal = 0;
+
+    if (type_id > 0)
+        retVal =  H5Tclose(type_id);
+
+    if (retVal < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retVal;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_create
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tenum_1create
+  (JNIEnv *env, jclass clss, jint base_id)
+{
+    hid_t status;
+
+    status =  H5Tenum_create((hid_t)base_id);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert
+ * Signature: (ILjava/lang/String;B)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tenum_1insert__ILjava_lang_String_2B
+  (JNIEnv *env, jclass clss, jint type, jstring name, jbyte value)
+{
+    herr_t status;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_insert:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    nameP = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    nameP = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_insert:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_insert((hid_t)type, nameP, &value);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,nameP);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,nameP);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert
+ * Signature: (ILjava/lang/String;S)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tenum_1insert__ILjava_lang_String_2S
+  (JNIEnv *env, jclass clss, jint type, jstring name, jshort value)
+{
+    herr_t status;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_insert:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    nameP = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    nameP = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_insert:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_insert((hid_t)type, nameP, &value);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,nameP);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,nameP);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_insert
+ * Signature: (ILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tenum_1insert__ILjava_lang_String_2I
+  (JNIEnv *env, jclass clss, jint type, jstring name, jint value)
+{
+    herr_t status;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_insert:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    nameP = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    nameP = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_insert:  name not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_insert((hid_t)type, nameP, &value);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,nameP);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,nameP);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tconvert_to_little_endian
+ * Signature: ([S)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tconvert_1to_1little_1endian___3S
+  (JNIEnv *env, jclass clss, jshortArray value)
+{
+    jshort *byteP;
+    jboolean isCopy;
+    size_t nelem;
+    herr_t status;
+
+#ifdef __cplusplus
+    nelem = env->GetArrayLength(value);
+    byteP = env->GetShortArrayElements(value,&isCopy);
+#else
+    nelem = (*env)->GetArrayLength(env, value);
+    byteP = (*env)->GetShortArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Tconvert_to_little_endian:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tconvert(H5T_NATIVE_INT16, H5T_STD_I16LE, nelem, byteP, NULL, H5P_DEFAULT);
+
+#ifdef __cplusplus
+    env->ReleaseShortArrayElements(value,byteP,0);
+#else
+    (*env)->ReleaseShortArrayElements(env,value,byteP,0);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tconvert_to_little_endian
+ * Signature: ([I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tconvert_1to_1little_1endian___3I
+  (JNIEnv *env, jclass clss, jintArray value)
+{
+    jint *byteP;
+    jboolean isCopy;
+    size_t nelem;
+    herr_t status;
+
+#ifdef __cplusplus
+    nelem = env->GetArrayLength(value);
+    byteP = env->GetIntArrayElements(value,&isCopy);
+#else
+    nelem = (*env)->GetArrayLength(env, value);
+    byteP = (*env)->GetIntArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Tconvert_to_little_endian:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tconvert(H5T_NATIVE_INT32, H5T_STD_I32LE, nelem, byteP, NULL, H5P_DEFAULT);
+
+#ifdef __cplusplus
+    env->ReleaseIntArrayElements(value,byteP,0);
+#else
+    (*env)->ReleaseIntArrayElements(env,value,byteP,0);
+#endif
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_nameof
+ * Signature: (I[I[Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tenum_1nameof
+  (JNIEnv *env, jclass clss, jint type, jintArray value, jobjectArray name, jint size)
+{
+    hid_t status;
+    jint *byteP;
+    char *nameP;
+    jboolean isCopy;
+    jstring str;
+
+    if (size <= 0) {
+        h5badArgument( env, "H5Tenum_nameof:  name size < 0");
+        return -1;
+    }
+
+    nameP = (char *)malloc(sizeof(char)*size);
+    if (nameP == NULL) {
+        /* exception -- out of memory */
+        h5outOfMemory( env, "H5Tenum_nameof:  malloc name size");
+        return -1;
+    }
+
+    if ( value == NULL ) {
+        free(nameP);
+        h5nullArgument( env, "H5Tenum_nameof:  value is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    byteP = env->GetIntArrayElements(value,&isCopy);
+#else
+    byteP = (*env)->GetIntArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL) {
+        free(nameP);
+        h5JNIFatalError( env, "H5Tenum_nameof:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_nameof((hid_t)type, byteP, nameP, (size_t)size);
+
+#ifdef __cplusplus
+    env->ReleaseIntArrayElements(value,byteP,JNI_ABORT);
+#else
+    (*env)->ReleaseIntArrayElements(env,value,byteP,JNI_ABORT);
+#endif
+
+    if (status < 0) {
+        free(nameP);
+        h5libraryError(env);
+    }
+    else {
+#ifdef __cplusplus
+        str = env->NewStringUTF(nameP);
+#else
+        str = (*env)->NewStringUTF(env,nameP);
+#endif
+        if (str == NULL) {
+            free(nameP);
+            h5outOfMemory( env, "H5Tenum_nameof:  return array not created");
+            return -1;
+        }
+        /*  SetObjectArrayElement may raise exceptions */
+#ifdef __cplusplus
+        env->SetObjectArrayElement(name,0,(jobject)str);
+#else
+        (*env)->SetObjectArrayElement(env,name,0,(jobject)str);
+#endif
+    }
+
+    free(nameP);
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tenum_valueof
+ * Signature: (ILjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tenum_1valueof
+  (JNIEnv *env, jclass clss, jint type, jstring name, jintArray value)
+{
+    hid_t status;
+    jint *byteP;
+    char *nameP;
+    jboolean isCopy;
+
+    if (name == NULL) {
+        h5nullArgument( env, "H5Tenum_valueof:  name is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    nameP = (char *)env->GetStringUTFChars(name,&isCopy);
+#else
+    nameP = (char *)(*env)->GetStringUTFChars(env,name,&isCopy);
+#endif
+    if (nameP == NULL) {
+        h5JNIFatalError( env, "H5Tenum_valueof:  name not pinned");
+        return -1;
+    }
+
+    if ( value == NULL ) {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(name,nameP);
+#else
+        (*env)->ReleaseStringUTFChars(env,name,nameP);
+#endif
+        h5nullArgument( env, "H5Tenum_valueof:  value is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    byteP = env->GetIntArrayElements(value,&isCopy);
+#else
+    byteP = (*env)->GetIntArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL)  {
+#ifdef __cplusplus
+        env->ReleaseStringUTFChars(name,nameP);
+#else
+        (*env)->ReleaseStringUTFChars(env,name,nameP);
+#endif
+        h5JNIFatalError( env, "H5Tenum_valueof:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tenum_valueof((hid_t)type, nameP, byteP);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(name,nameP);
+#else
+    (*env)->ReleaseStringUTFChars(env,name,nameP);
+#endif
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(value,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,value,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(value,byteP,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,value,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tvlen_create
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tvlen_1create
+  (JNIEnv *env, jclass clss, jint base_id)
+{
+    hid_t status;
+
+    status = H5Tvlen_create((hid_t)base_id);
+    if (status < 0)
+        h5libraryError(env);
+
+    return status;
+}
+
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tset_tag
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tset_1tag
+  (JNIEnv *env, jclass clss, jint type, jstring tag)
+{
+    herr_t status;
+    char *tagP;
+    jboolean isCopy;
+
+    if (tag == NULL) {
+        h5nullArgument( env, "H5Tset_tag:  tag is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    tagP = (char *)env->GetStringUTFChars(tag,&isCopy);
+#else
+    tagP = (char *)(*env)->GetStringUTFChars(env,tag,&isCopy);
+#endif
+    if (tagP == NULL) {
+        h5JNIFatalError( env, "H5Tset_tag:  tag not pinned");
+        return -1;
+    }
+
+    status = H5Tset_tag((hid_t)type, tagP);
+
+#ifdef __cplusplus
+    env->ReleaseStringUTFChars(tag,tagP);
+#else
+    (*env)->ReleaseStringUTFChars(env,tag,tagP);
+#endif
+
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_tag
+ * Signature: (I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1tag
+  (JNIEnv *env, jclass clss, jint type)
+{
+    jstring str;
+    char *tag;
+
+    tag = H5Tget_tag((hid_t)type);
+
+    if (tag == NULL)
+        return NULL;
+
+    /* may throw OutOfMemoryError */
+#ifdef __cplusplus
+    str = env->NewStringUTF(tag);
+#else
+    str = (*env)->NewStringUTF(env,tag);
+#endif
+    if (str == NULL)  {
+        free(tag);
+        h5outOfMemory(env,  "H5Tget_tag:  returned string not created");
+        return NULL;
+    }
+
+    free(tag);
+    return str;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_super
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1super
+  (JNIEnv *env, jclass clss, jint type)
+{
+    hid_t status;
+
+    status = H5Tget_super((hid_t)type);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_member_value
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1member_1value
+  (JNIEnv *env, jclass clss, jint type, jint membno, jintArray value)
+{
+    hid_t status;
+    jint *byteP;
+    jboolean isCopy;
+
+    if ( value == NULL ) {
+        h5nullArgument( env, "H5Tget_member_value:  value is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    byteP = env->GetIntArrayElements(value,&isCopy);
+#else
+    byteP = (*env)->GetIntArrayElements(env,value,&isCopy);
+#endif
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "H5Tget_member_value:  value not pinned");
+        return -1;
+    }
+
+    status = H5Tget_member_value((hid_t)type, (int)membno, byteP);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(value,byteP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,value,byteP,JNI_ABORT);
+#endif
+        h5libraryError(env);
+    } else {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(value,byteP,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,value,byteP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tarray_create
+ * Signature: (II[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tarray_1create
+  (JNIEnv *env, jclass clss, jint base, jint rank, jintArray dims)
+{
+    hid_t status;
+    jint *dimsP;
+    int dlen;
+    hsize_t *cdims;
+    jboolean isCopy;
+    int i;
+
+    if (rank <= 0) {
+        h5nullArgument( env, "H5Tarray_create:  rank is < 1");
+        return -1;
+    }
+    if ( dims == NULL ) {
+        h5nullArgument( env, "H5Tarray_create:  dims is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    dimsP = env->GetIntArrayElements(dims,&isCopy);
+#else
+    dimsP = (*env)->GetIntArrayElements(env,dims,&isCopy);
+#endif
+    if (dimsP == NULL) {
+        h5JNIFatalError( env, "H5Tarray_create:  dimsP not pinned");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    dlen = env->GetArrayLength(dims);
+#else
+    dlen = (*env)->GetArrayLength(env,dims);
+#endif
+    if (dlen != rank) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(dims,dimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+        h5badArgument( env, "H5Tarray_create:  dims length != rank");
+        return -1;
+    }
+    cdims = (hsize_t *)malloc(dlen * sizeof(hsize_t));
+    if (cdims == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(dims,dimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Tarray_create:  dims not converted to hsize_t");
+        return -1;
+    }
+
+    for (i = 0; i < dlen; i++) {
+        cdims[i] = (hsize_t)dimsP[i];
+    }
+
+    status = H5Tarray_create((hid_t)base, (int)rank, (const hsize_t *)cdims);
+
+    free(cdims);   /* was leaked before */
+#ifdef __cplusplus
+    env->ReleaseIntArrayElements(dims,dimsP,JNI_ABORT);
+#else
+    (*env)->ReleaseIntArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_ndims
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1array_1ndims
+  (JNIEnv *env, jclass clss, jint dt)
+{
+    hid_t status;
+
+    status = H5Tget_array_ndims((hid_t)dt);
+    if (status < 0)
+        h5libraryError(env);
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims
+ * Signature: (I[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1array_1dims
+  (JNIEnv *env, jclass clss, jint dt, jintArray dims)
+{
+    hid_t status;
+    jint *dimsP;
+    int dlen;
+    int i;
+    hsize_t *cdims;
+    jboolean isCopy;
+
+    if ( dims == NULL ) {
+        h5nullArgument( env, "H5Tget_array_dims:  value is NULL");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    dimsP = env->GetIntArrayElements(dims,&isCopy);
+#else
+    dimsP = (*env)->GetIntArrayElements(env,dims,&isCopy);
+#endif
+    if (dimsP == NULL) {
+        h5JNIFatalError( env, "H5Tget_array_dims:  dimsP not pinned");
+        return -1;
+    }
+
+#ifdef __cplusplus
+    dlen = env->GetArrayLength(dims);
+#else
+    dlen = (*env)->GetArrayLength(env,dims);
+#endif
+    cdims = (hsize_t *)malloc(dlen * sizeof(hsize_t));
+    if (cdims == NULL) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(dims,dimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+        h5outOfMemory(env,  "H5Tget_array_dims:  dims not converted to hsize_t");
+        return -1;
+    }
+
+    status = H5Tget_array_dims((hid_t)dt, (hsize_t *)cdims);
+
+    if (status < 0) {
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(dims,dimsP,JNI_ABORT);
+#else
+        (*env)->ReleaseIntArrayElements(env,dims,dimsP,JNI_ABORT);
+#endif
+        free(cdims);
+        h5libraryError(env);
+    } else {
+        for (i = 0; i < dlen; i++) {
+            dimsP[i] = (jint) cdims[i];
+        }
+        free(cdims);   /* was leaked before */
+#ifdef __cplusplus
+        env->ReleaseIntArrayElements(dims,dimsP,0);
+#else
+        (*env)->ReleaseIntArrayElements(env,dims,dimsP,0);
+#endif
+    }
+
+    return (jint)status;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tis_variable_str(hid_t dtype_id )
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tis_1variable_1str
+  (JNIEnv *env, jclass clss, jint dtype_id)
+{
+    htri_t bval;
+    bval = H5Tis_variable_str((hid_t)dtype_id);
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tget_native_type(hid_t type_id, H5T_direction_t direction )
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tget_1native_1type
+  (JNIEnv *env, jclass clss, jint dtype_id, jint direction)
+{
+    hid_t native_tid;
+
+    native_tid = H5Tget_native_type((hid_t)dtype_id, (H5T_direction_t)direction);
+
+    if (native_tid < 0){
+        h5libraryError(env);
+        return -1;
+    }
+
+    return (jint)native_tid;
+}
+
+/*
+ * Class:     ncsa_hdf_hdf5lib_H5
+ * Method:    H5Tdetect_class(hid_t dtype_id, H5T_class_t dtype_class )
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Tdetect_1class
+  (JNIEnv *env, jclass clss, jint dtype_id, jint dtype_class)
+{
+    htri_t bval;
+    bval = H5Tdetect_class((hid_t)dtype_id, (H5T_class_t)dtype_class);
+    if (bval > 0) {
+        return JNI_TRUE;
+    } else if (bval == 0) {
+        return JNI_FALSE;
+    } else {
+        h5libraryError(env);
+        return JNI_FALSE;
+    }
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/h5utilJHDF5.c b/source/c/jhdf5/h5utilJHDF5.c
new file mode 100755
index 0000000..eaa885f
--- /dev/null
+++ b/source/c/jhdf5/h5utilJHDF5.c
@@ -0,0 +1,407 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdf.ncsa.uiuc.edu/HDF5/doc/Copyright.html.  If you do not have     *
+ * access to either file, you may request a copy from hdfhelp at ncsa.uiuc.edu. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5utilJHDF5.h"
+
+int h5str_dump_region_jhdf5(h5str_t *str, hid_t region);
+static hbool_t h5tools_is_zero_jhdf5(const void *_mem, size_t size);
+
+/** frees memory held by an array of strings */
+void  h5str_array_free_jhdf5(char **strs, size_t len)
+{
+    size_t i;
+
+    if (!strs || len <=0)
+        return;
+
+    for (i=0; i<len; i++) {
+        if (*(strs+i))
+            free (*(strs+i));
+    } /* for (i=0; i<n; i++)*/
+    free(strs);
+}
+
+/** allocate a new str with given length */
+void h5str_new_jhdf5(h5str_t *str, size_t len)
+{
+	if (str && len > 0)
+	{
+		str->s = (char *) malloc(len);
+		str->max = len;
+		str->s[0] = '\0';
+	}
+}
+
+/** free string memory */
+void h5str_free_jhdf5(h5str_t *str)
+{
+	if (str && str->max>0)
+	{
+		free(str->s);
+		memset(str, 0, sizeof(h5str_t));
+	}
+}
+
+/** reset the max size of the string */
+void h5str_resize_jhdf5 (h5str_t *str, size_t new_len)
+{
+	char *new_str;
+
+	if (!str || new_len<=0 || str->max == new_len)
+		return;
+
+	new_str = (char *)malloc(new_len);
+	if (!new_str)
+		return;
+	if (new_len > str->max) /* increase memory */
+		strcpy(new_str, str->s);
+	else
+	{
+		strncpy(new_str, str->s, new_len-1);
+		new_str[new_len-1] = '\0'; /* strncpy does not NUL-terminate on truncation */
+	}
+
+	free(str->s);
+	str->s = new_str;
+	str->max = new_len;
+}
+
+/* appends a copy of the string pointed to by cstr to the h5str.
+	Return Value:
+	the char string pointed to by str->s
+*/
+char* h5str_append_jhdf5 (h5str_t *str, const char* cstr)
+{
+	size_t len;
+
+	if (!str)
+		return NULL;
+	else if (!cstr)
+		return str->s;
+
+	len = strlen(str->s) + strlen(cstr);
+	while (len >= str->max) /* not enough to hold the new string, double the space */
+	{
+		h5str_resize_jhdf5(str, str->max*2);
+	}
+
+	return strcat(str->s, cstr);
+}
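+
+/*
+ * Usage sketch (illustrative, not part of the upstream source): the
+ * intended lifecycle of an h5str_t.  h5str_append_jhdf5 keeps doubling
+ * the buffer through h5str_resize_jhdf5 until the concatenation fits,
+ * so callers never size the buffer by hand.  Kept inside #if 0 so it
+ * is never compiled.
+ */
+#if 0
+static void h5str_example_jhdf5(void)
+{
+	h5str_t str;
+	h5str_new_jhdf5(&str, 8);              /* deliberately small buffer */
+	h5str_append_jhdf5(&str, "dims = ");   /* fits in the initial 8 bytes */
+	h5str_append_jhdf5(&str, "[ 2, 3 ]");  /* forces a resize to 16 bytes */
+	printf("%s\n", str.s);                 /* prints: dims = [ 2, 3 ] */
+	h5str_free_jhdf5(&str);                /* frees str.s, zeroes the struct */
+}
+#endif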
+
+/** prints the value of a data point into a string.
+	Return Value:
+		On success, the total number of characters printed is returned.
+		On error, a negative number is returned.
+*/
+int h5str_sprintf_jhdf5(h5str_t *str, hid_t container, hid_t tid, void *ptr)
+{
+	unsigned char		tmp_uchar = 0;
+	char				tmp_char = 0;
+	unsigned short		tmp_ushort = 0;
+	short				tmp_short = 0;
+	unsigned int		tmp_uint = 0;
+	int					tmp_int = 0;
+	unsigned long		tmp_ulong = 0;
+	long				tmp_long = 0;
+	float				tmp_float = 0;
+	double				tmp_double = 0.0;
+
+	size_t offset, size;
+	char *cptr = (char*)ptr;
+	unsigned char *ucptr = (unsigned char*)ptr;
+	char *this_str;
+	int	this_strlen, i, n;
+	hid_t mtid = -1;
+	H5T_class_t tclass = H5Tget_class(tid);
+	hvl_t *vlptr;
+
+	if (!str || !ptr)
+		return -1;
+
+	this_str = NULL;
+	this_strlen = 0;
+
+    if (H5Tequal(tid, H5T_NATIVE_SCHAR))
+	{
+		this_str = (char*)malloc(7);
+        memcpy(&tmp_char, ptr, 1);
+		sprintf(this_str, "%d", tmp_char);
+    } else if (H5Tequal(tid, H5T_NATIVE_UCHAR))
+	{
+		this_str = (char*)malloc(7);
+        memcpy(&tmp_uchar, ptr, 1);
+		sprintf(this_str, "%u", tmp_uchar);
+    } else if (H5Tequal(tid, H5T_NATIVE_SHORT))
+	{
+		this_str = (char*)malloc(9);
+        memcpy(&tmp_short, ptr, 2);
+		sprintf(this_str, "%d", tmp_short);
+    } else if (H5Tequal(tid, H5T_NATIVE_USHORT))
+	{
+		this_str = (char*)malloc(9);
+        memcpy(&tmp_ushort, ptr, 2);
+		sprintf(this_str, "%u", tmp_ushort);
+	} else if (H5Tequal(tid, H5T_NATIVE_INT))
+	{
+		this_str = (char*)malloc(14);
+        memcpy(&tmp_int, ptr, 4);
+		sprintf(this_str, "%d", tmp_int);
+    } else if (H5Tequal(tid, H5T_NATIVE_UINT))
+	{
+		this_str = (char*)malloc(14);
+        memcpy(&tmp_uint, ptr, 4);
+		sprintf(this_str, "%u", tmp_uint);
+    } else if (H5Tequal(tid, H5T_NATIVE_LONG)) {
+		this_str = (char*)malloc(23);
+        memcpy(&tmp_long, ptr, sizeof(long));
+		sprintf(this_str, "%ld", tmp_long);
+    } else if (H5Tequal(tid, H5T_NATIVE_ULONG))
+	{
+		this_str = (char*)malloc(23);
+        memcpy(&tmp_ulong, ptr, sizeof(unsigned long));
+		sprintf(this_str, "%lu", tmp_ulong);
+    } else if (H5Tequal(tid, H5T_STD_REF_OBJ))
+	{
+		this_str = (char*)malloc(23);
+        memcpy(&tmp_ulong, ptr, sizeof(void *));
+		sprintf(this_str, "%lu", tmp_ulong);
+	} else 	if (H5Tequal(tid, H5T_NATIVE_FLOAT))
+	{
+		this_str = (char*)malloc(64); /* large enough for "%f" of any float */
+        memcpy(&tmp_float, ptr, sizeof(float));
+		sprintf(this_str, "%f", tmp_float);
+    } else if (H5Tequal(tid, H5T_NATIVE_DOUBLE)) {
+		this_str = (char*)malloc(340); /* large enough for "%f" of any double */
+        memcpy(&tmp_double, ptr, sizeof(double));
+		sprintf(this_str, "%f", tmp_double);
+    } else if (tclass == H5T_STRING)
+	{
+        char *tmp_str;
+		size = 0;
+
+        if(H5Tis_variable_str(tid)) 
+		{
+            tmp_str = *(char**)ptr;
+            if(tmp_str) size = strlen(tmp_str);
+        } else 
+		{
+            tmp_str = cptr;
+            size = H5Tget_size(tid);
+        }
+
+		if (size > 0)
+		{
+			this_str = (char *)malloc(size + 1);
+			strncpy(this_str, tmp_str, size);
+			this_str[size] = '\0'; /* fixed-length HDF5 strings need not be null-terminated */
+		}
+    } else if (tclass == H5T_COMPOUND)
+	{
+        n = H5Tget_nmembers(tid);
+		h5str_append_jhdf5(str, " {");
+
+        for (i = 0; i < n; i++)
+		{
+			offset = H5Tget_member_offset(tid, i);
+            mtid = H5Tget_member_type(tid ,i);
+            h5str_sprintf_jhdf5(str, container, mtid, cptr+offset);
+            if (i<n-1) h5str_append_jhdf5(str, ", "); /* append safely: strcat could overflow the buffer */
+            H5Tclose(mtid);
+        }
+		h5str_append_jhdf5(str, "} ");
+    } else if (tclass == H5T_ARRAY)
+	{
+        int rank=0;
+        hsize_t dims[H5S_MAX_RANK], total_elmts;
+
+		h5str_append_jhdf5(str, "[ ");
+
+        mtid = H5Tget_super(tid);
+        size = H5Tget_size(mtid);
+        rank = H5Tget_array_ndims(tid);
+        H5Tget_array_dims(tid, dims);
+
+		total_elmts = 1;
+        for (i=0; i<rank; i++)
+			total_elmts *= dims[i];
+
+        for (i = 0; i < total_elmts; i++)
+		{
+            h5str_sprintf_jhdf5(str, container, mtid, cptr + i * size);
+			if (i<total_elmts-1) h5str_append_jhdf5(str, ", ");
+        }
+        H5Tclose(mtid);
+		h5str_append_jhdf5(str, "] ");
+    } else if (tclass == H5T_VLEN)
+	{
+        mtid = H5Tget_super(tid);
+        size = H5Tget_size(mtid);
+
+		vlptr = (hvl_t *)cptr;
+
+        n = vlptr->len;
+        for (i = 0; i < n; i++)
+		{
+			h5str_sprintf_jhdf5(str, container, mtid, ((char *)(vlptr->p)) + i * size);
+        	if (i<n-1) h5str_append_jhdf5(str, ", ");
+		}
+        H5Tclose(mtid);
+    } else if (H5Tequal(tid, H5T_STD_REF_DSETREG)) {
+        /*
+         * Dataset region reference -- show the type and OID of the referenced
+         * object, but we are unable to show the region yet because there
+         * isn't enough support in the data space layer.  - rpm 19990604
+         */
+        if (h5tools_is_zero_jhdf5(ptr, H5Tget_size(tid))) {
+            h5str_append_jhdf5(str, "NULL");
+        } else {
+            char         obj_info[128];
+            hid_t        obj, region;
+            H5G_stat_t   sb;
+
+            /* show the OID of the dataset that the region reference points to */
+            obj = H5Rdereference(container, H5R_DATASET_REGION, ptr);
+            H5Gget_objinfo(obj, ".", 0, &sb);
+            sprintf(obj_info, "%lu:%lu ", sb.objno[1], sb.objno[0]);
+            h5str_append_jhdf5(str, obj_info);
+
+            region = H5Rget_region(container, H5R_DATASET_REGION, ptr);
+            h5str_dump_region_jhdf5(str, region);
+            H5Sclose(region);
+            H5Dclose(obj);
+        }
+    } else /* All other types get printed as hexadecimal */
+	{
+        n = H5Tget_size(tid);
+		this_str = (char*)malloc(4*(n+1));
+		this_str[0] = '\0';
+
+        if (1==n)
+		{
+			sprintf(this_str, "0x%02x", ucptr[0]);
+        } else
+		{
+            for (i = 0; i < n; i++)
+				sprintf(this_str + strlen(this_str), "%s%02x", i?":":"", ucptr[i]); /* append rather than overwrite */
+        }
+
+    }
+
+    if (this_str)
+	{
+		h5str_append_jhdf5(str, this_str);
+		this_strlen = strlen(this_str);
+		free (this_str);
+	}
+
+	return this_strlen;
+}
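+/* Illustrative sketch (not part of the library): formatting a single native
+   int with h5str_sprintf_jhdf5, assuming an initialized h5str_t as above:
+
+       int value = 42;
+       h5str_sprintf_jhdf5(&str, container_id, H5T_NATIVE_INT, &value);
+
+   Afterwards str.s ends with "42". For atomic types the container id is not
+   used; it only matters for dataset region references, which are
+   dereferenced against it. */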
+
+/* dumps region reference information into a string */
+int h5str_dump_region_jhdf5(h5str_t *str, hid_t region)
+{
+    hssize_t    nblocks, npoints;
+    hsize_t     alloc_size;
+    hsize_t     *ptdata;
+    int         ndims = H5Sget_simple_extent_ndims(region);
+    char        tmp_str[256];
+
+    /*
+     * These two functions fail if the region does not have blocks or points,
+     * respectively. They do not currently know how to translate from one to
+     * the other.
+     */
+    H5E_BEGIN_TRY {
+        nblocks = H5Sget_select_hyper_nblocks(region);
+        npoints = H5Sget_select_elem_npoints(region);
+    } H5E_END_TRY;
+
+    h5str_append_jhdf5(str, "{");
+
+    /* Print block information */
+    if (nblocks > 0) {
+        int i;
+
+        alloc_size = nblocks * ndims * 2 * sizeof(ptdata[0]);
+        if (alloc_size == (hsize_t)((size_t)alloc_size)) {
+            ptdata = malloc((size_t)alloc_size);
+            H5Sget_select_hyper_blocklist(region, (hsize_t)0, (hsize_t)nblocks, ptdata);
+
+            for (i = 0; i < nblocks; i++) {
+                int j;
+    
+                h5str_append_jhdf5(str, " ");
+    
+                /* Start coordinates and opposite corner */
+                for (j = 0; j < ndims; j++) {
+                    tmp_str[0] = '\0';
+                    sprintf(tmp_str, "%s%lu", j ? "," : "(", (unsigned long)ptdata[i * 2 * ndims + j]);
+                    h5str_append_jhdf5(str, tmp_str);
+                }
+    
+                for (j = 0; j < ndims; j++) {
+                    tmp_str[0] = '\0';
+                    sprintf(tmp_str, "%s%lu", j ? "," : ")-(", (unsigned long)ptdata[i * 2 * ndims + j + ndims]);
+                    h5str_append_jhdf5(str, tmp_str);
+                }
+                h5str_append_jhdf5(str, ") ");
+                tmp_str[0] = '\0';
+            }
+    
+            free(ptdata);        
+        } /* if (alloc_size == (hsize_t)((size_t)alloc_size)) */
+    } /* if (nblocks > 0) */
+
+    /* Print point information */
+    if (npoints > 0) {
+        int i;
+
+        alloc_size = npoints * ndims * sizeof(ptdata[0]);
+        if (alloc_size == (hsize_t)((size_t)alloc_size)) {
+            ptdata = malloc((size_t)alloc_size);
+            H5Sget_select_elem_pointlist(region, (hsize_t)0, (hsize_t)npoints, ptdata);
+    
+            for (i = 0; i < npoints; i++) {
+                int j;
+    
+                h5str_append_jhdf5(str, " ");
+
+                for (j = 0; j < ndims; j++) {
+                    tmp_str[0] = '\0';
+                    sprintf(tmp_str, "%s%lu", j ? "," : "(", (unsigned long)(ptdata[i * ndims + j]));
+                    h5str_append_jhdf5(str, tmp_str);
+                }
+    
+                h5str_append_jhdf5(str, ") ");
+            }
+    
+            free(ptdata);
+        }
+    }
+
+    h5str_append_jhdf5(str, "}");
+
+    return 0;
+}
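+/* For example (illustrative), a hyperslab selection covering rows 0-9 and
+   columns 0-4 of a 2D dataspace is rendered by the function above as
+
+       { (0,0)-(9,4) }
+
+   and an element selection of the points (1,2) and (3,4) as
+
+       { (1,2)  (3,4) }
+*/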
+
+
+static hbool_t h5tools_is_zero_jhdf5(const void *_mem, size_t size)
+{
+    const unsigned char *mem = (const unsigned char *)_mem;
+
+    while (size-- > 0)
+        if (mem[size])
+            return 0;
+
+    return 1;
+}
+
+
diff --git a/source/c/jhdf5/h5utilJHDF5.h b/source/c/jhdf5/h5utilJHDF5.h
new file mode 100755
index 0000000..69ab012
--- /dev/null
+++ b/source/c/jhdf5/h5utilJHDF5.h
@@ -0,0 +1,30 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdf.ncsa.uiuc.edu/HDF5/doc/Copyright.html.  If you do not have     *
+ * access to either file, you may request a copy from hdfhelp at ncsa.uiuc.edu. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef H5UTILJHDF5_H__
+#define H5UTILJHDF5_H__
+
+typedef struct h5str_t {
+    char	*s;
+    size_t	max;  /* the allocated size of the string */
+} h5str_t;
+
+void	h5str_new_jhdf5 (h5str_t *str, size_t len);
+void	h5str_free_jhdf5 (h5str_t *str);
+void	h5str_resize_jhdf5 (h5str_t *str, size_t new_len);
+char*	h5str_append_jhdf5 (h5str_t *str, const char* cstr);
+int	h5str_sprintf_jhdf5(h5str_t *str, hid_t container, hid_t tid, void *buf);
+void    h5str_array_free_jhdf5(char **strs, size_t len);
+
+#endif  /* H5UTILJHDF5_H__ */
diff --git a/source/c/jhdf5/h5zImpJHDF5.c b/source/c/jhdf5/h5zImpJHDF5.c
new file mode 100755
index 0000000..509ae0a
--- /dev/null
+++ b/source/c/jhdf5/h5zImpJHDF5.c
@@ -0,0 +1,107 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+/*
+ *  This code is the C-interface called by Java programs to access the
+ *  Filter (H5Z) API Functions of the HDF5 library.
+ *
+ *  Each routine wraps a single HDF entry point, generally with the
+ *  analogous arguments and return codes.
+ *
+ *  For details of the HDF libraries, see the HDF Documentation at:
+ *   http://hdf.ncsa.uiuc.edu/HDF5/doc/
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5badArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    H5Zunregister(H5Z_filter_t filter)
+ * Signature: ([BILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Zunregister
+  (JNIEnv *env, jclass clss,
+  jbyteArray ref, jint loc_id, jstring name, jint filter)
+{
+    herr_t retValue;
+
+    retValue = H5Zunregister((H5Z_filter_t)filter);
+
+    if (retValue < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retValue;
+}
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    H5Zfilter_avail(H5Z_filter_t filter)
+ * Signature: ([BILjava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Zfilter_1avail
+  (JNIEnv *env, jclass clss,
+  jbyteArray ref, jint loc_id, jstring name, jint filter)
+{
+    herr_t retValue;
+
+    retValue = H5Zfilter_avail((H5Z_filter_t)filter);
+
+    if (retValue < 0) {
+        h5libraryError(env);
+    }
+
+    return (jint)retValue;
+}
+
+
+/**********************************************************************
+ *                                                                    *
+ *          New functions release 1.6.3 versus release 1.6.2          *
+ *                                                                    *
+ **********************************************************************/
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Signature: herr_t H5Zget_filter_info (H5Z_filter_t filter, unsigned * flags)
+ * Purpose:   Retrieve the filter-config flags for the given filter
+ */
+
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_H5Zget_1filter_1info
+  (JNIEnv *env, jclass clss, jint filter)
+{
+    herr_t status;
+    unsigned int flags = 0;
+
+    status = H5Zget_filter_info ((H5Z_filter_t) filter, (unsigned *) &flags);
+
+    if (status < 0) {
+        h5libraryError(env);
+    }
+
+    return flags;
+}
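+/* Illustrative Java-side sketch (hypothetical caller, not part of this file):
+   the returned flags can be tested against the HDF5 filter-config constants:
+
+       int flags = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+       boolean canEncode = (flags & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;
+       boolean canDecode = (flags & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) != 0;
+*/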
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/source/c/jhdf5/strcpyJHDF5.c b/source/c/jhdf5/strcpyJHDF5.c
new file mode 100644
index 0000000..4cdd464
--- /dev/null
+++ b/source/c/jhdf5/strcpyJHDF5.c
@@ -0,0 +1,159 @@
+#include "hdf5.h"
+#include "h5utilJHDF5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+
+extern jboolean h5outOfMemory( JNIEnv *env, char *functName);
+extern jboolean h5JNIFatalError( JNIEnv *env, char *functName);
+extern jboolean h5nullArgument( JNIEnv *env, char *functName);
+extern jboolean h5libraryError( JNIEnv *env );
+
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    getPointerSize
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_getPointerSize
+  (JNIEnv *env, jclass clss)
+{
+    return sizeof(void *);
+}
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    compoundCpyVLStr
+ * Signature: (Ljava/lang/String;[BI)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_compoundCpyVLStr
+  (JNIEnv *env, 
+   jclass clss, 
+   jstring str, /* IN: the string to copy */ 
+   jbyteArray buf, /* OUT: array of byte */
+   jint bufOfs /* The offset to copy the pointer to the string to. */
+  )
+{
+    jbyte *byteP;
+    char *strPCpy;
+    int numberOfBytes, numberOfCharacters;
+
+
+    if ( str == NULL ) {
+        h5nullArgument( env, "compoundCpyVLStr:  str is NULL");
+        return -1;
+    }
+    if ( buf == NULL ) {
+        h5nullArgument( env, "compoundCpyVLStr:  buf is NULL");
+        return -1;
+    }
+
+	numberOfBytes = (*env)->GetStringUTFLength(env, str);
+	strPCpy = calloc(1, numberOfBytes + 1); /* +1: GetStringUTFRegion appends a terminating '\0' */
+	if (strPCpy == NULL) {
+	    h5outOfMemory( env, "compoundCpyVLStr:  out of memory");
+	    return -1;
+	}
+	numberOfCharacters = (*env)->GetStringLength(env, str);
+    (*env)->GetStringUTFRegion(env, str, 0, numberOfCharacters, strPCpy);
+
+    byteP = (*env)->GetPrimitiveArrayCritical(env, buf, NULL);
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "compoundCpyVLStr:  buf not pinned");
+        return -1;
+    }
+	*((char**)(byteP + bufOfs)) = strPCpy;
+    (*env)->ReleasePrimitiveArrayCritical(env, buf, byteP, 0);
+
+	return 0;
+}
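+/* Memory-layout sketch (illustrative, hypothetical record type): the function
+   above stores a freshly malloc'ed C string pointer at byte offset bufOfs of
+   the compound buffer, e.g. for { int32 id; char *name; } with name at
+   offset 4:
+
+       buf:  [ id (4 bytes) | char *name (sizeof(void *) bytes) | ... ]
+                              ^ bufOfs = 4
+
+   The pointer must later be released via freeCompoundVLStr below, or the
+   copied string leaks. */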
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    createVLStrFromCompound
+ * Signature: ([BI)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_createVLStrFromCompound
+  (JNIEnv *env, 
+   jclass clss, 
+   jbyteArray buf, /* IN: array of byte containing the compound or compound array. */
+   jint offset /* IN: The offset in the compound or compound array where the pointer to the string is located. */
+  )
+{
+    char *byteP;
+    char **strP;
+    jstring str;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "createVLStrFromCompound:  buf is NULL");
+        return NULL;
+    }
+
+    byteP = (*env)->GetPrimitiveArrayCritical(env, buf, NULL);
+    if (byteP == NULL) {
+        h5JNIFatalError( env, "createVLStrFromCompound:  buf not pinned");
+        return NULL;
+    }
+    
+	strP = (char**) (byteP + offset);
+	str = (*env)->NewStringUTF(env, *strP);
+	
+    (*env)->ReleasePrimitiveArrayCritical(env, buf, byteP, 0);
+	
+	return str;
+}
+
+/*
+ * Class:     ch_systemsx_cisd_hdf5_hdf5lib_H5
+ * Method:    freeCompoundVLStr
+ * Signature: ([BI[I)I
+ */
+JNIEXPORT jint JNICALL Java_ch_systemsx_cisd_hdf5_hdf5lib_H5_freeCompoundVLStr
+  (JNIEnv *env, 
+   jclass clss, 
+   jbyteArray buf, /* IN: array of byte containing the compound or compound array. */
+   jint recordSize, /* IN: The size of one compound record. */
+   jintArray vlIndices /* IN: The indices of the variable-length compound members in the record. */
+  )
+{
+    char *byteP, *ptr;
+    char **strP;
+    jsize bufLen, idxLen;
+    int *idxP, i;
+
+    if ( buf == NULL ) {
+        h5nullArgument( env, "freeCompoundVLStr:  buf is NULL");
+        return -1;
+    }
+    if ( vlIndices == NULL ) {
+        h5nullArgument( env, "freeCompoundVLStr:  vlIndices is NULL");
+        return -1;
+    }
+
+	idxLen = (*env)->GetArrayLength(env, vlIndices);
+	bufLen = (*env)->GetArrayLength(env, buf);
+
+    idxP = (*env)->GetPrimitiveArrayCritical(env, vlIndices, NULL);
+    if (idxP == NULL) {
+        h5JNIFatalError( env, "freeCompoundVLStr:  vlIndices not pinned");
+        return -1;
+    }
+    byteP = (*env)->GetPrimitiveArrayCritical(env, buf, NULL);
+    if (byteP == NULL) {
+	    (*env)->ReleasePrimitiveArrayCritical(env, vlIndices, idxP, 0);
+        h5JNIFatalError( env, "freeCompoundVLStr:  buf not pinned");
+        return -1;
+    }
+    
+	ptr = byteP;
+	while (ptr - byteP < bufLen)
+	{
+	    for (i = 0; i < idxLen; ++i)
+	    {
+	    	strP = (char**) (ptr + idxP[i]);
+	        free(*strP);
+	    }
+	    ptr += recordSize; 
+	}
+	
+    (*env)->ReleasePrimitiveArrayCritical(env, vlIndices, idxP, 0);
+    (*env)->ReleasePrimitiveArrayCritical(env, buf, byteP, 0);
+	
+	return 0;
+}
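+/* Illustrative sketch (hypothetical values): freeing the variable-length
+   strings of an array of the record type sketched above, assuming 8-byte
+   pointers, a 12-byte record, and the single char* member at offset 4:
+
+       int[] vlIndices = new int[] { 4 };
+       H5.freeCompoundVLStr(buf, 12, vlIndices);
+
+   The function walks the buffer record by record and frees the pointer found
+   at each listed offset. */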
diff --git a/source/c/version.sh b/source/c/version.sh
new file mode 100755
index 0000000..e1dedeb
--- /dev/null
+++ b/source/c/version.sh
@@ -0,0 +1 @@
+VERSION=1.8.14
diff --git a/source/java/ch/systemsx/cisd/hdf5/BitSetConversionUtils.java b/source/java/ch/systemsx/cisd/hdf5/BitSetConversionUtils.java
new file mode 100644
index 0000000..6011900
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/BitSetConversionUtils.java
@@ -0,0 +1,292 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Field;
+import java.util.BitSet;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import org.apache.commons.lang.SystemUtils;
+
+import ch.rinn.restrictions.Private;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * Methods for converting {@link BitSet}s to a storage form suitable for storing in an HDF5 file.
+ * <p>
+ * <i>This is an internal API that should not be expected to be stable between releases!</i>
+ * 
+ * @author Bernd Rinn
+ */
+public final class BitSetConversionUtils
+{
+    private final static int ADDRESS_BITS_PER_WORD = 6;
+
+    private final static int BITS_PER_WORD = 1 << ADDRESS_BITS_PER_WORD;
+
+    private final static int BIT_INDEX_MASK = BITS_PER_WORD - 1;
+
+    private static final Field BIT_SET_WORDS = getBitSetWords();
+
+    private static final Field BIT_SET_WORDS_IN_USE = getBitSetWordsInUse();
+
+    private static Field getBitSetWords()
+    {
+        try
+        {
+            final Field bitsField =
+                    BitSet.class.getDeclaredField(SystemUtils.IS_JAVA_1_5 ? "bits" : "words");
+            bitsField.setAccessible(true);
+            return bitsField;
+        } catch (final NoSuchFieldException ex)
+        {
+            return null;
+        }
+    }
+
+    private static Field getBitSetWordsInUse()
+    {
+        try
+        {
+            final Field unitsInUseField =
+                    BitSet.class.getDeclaredField(SystemUtils.IS_JAVA_1_5 ? "unitsInUse"
+                            : "wordsInUse");
+            unitsInUseField.setAccessible(true);
+            return unitsInUseField;
+        } catch (final NoSuchFieldException ex)
+        {
+            return null;
+        }
+    }
+
+    public static BitSet fromStorageForm(final long[] serializedWordArray)
+    {
+        return fromStorageForm(serializedWordArray, 0, serializedWordArray.length);
+    }
+    
+    public static BitSet fromStorageForm(final long[] serializedWordArray, int start, int length)
+    {
+        if (BIT_SET_WORDS != null)
+        {
+            return fromStorageFormFast(serializedWordArray, start, length);
+        } else
+        {
+            return fromStorageFormGeneric(serializedWordArray, start, length);
+        }
+    }
+
+    public static BitSet[] fromStorageForm2D(final MDLongArray serializedWordArray)
+    {
+        if (serializedWordArray.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + serializedWordArray.rank());
+        }
+        final int dimX = serializedWordArray.dimensions()[0];
+        final int dimY = serializedWordArray.dimensions()[1];
+        final BitSet[] result = new BitSet[dimY];
+        for (int i = 0; i < result.length; ++i)
+        {
+            result[i] = fromStorageForm(serializedWordArray.getAsFlatArray(), i * dimX, dimX);
+        }
+        return result;
+    }
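+    // Worked example (illustrative): for a rank-2 array with
+    // dimensions()[0] == 2 words per bit set, bit set i is decoded from the
+    // 2 words of the flat array starting at flat offset i * 2.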
+
+    private static BitSet fromStorageFormFast(final long[] serializedWordArray, int start, int length)
+    {
+        try
+        {
+            final BitSet result = new BitSet();
+            int inUse = calcInUse(serializedWordArray, start, length);
+            BIT_SET_WORDS_IN_USE.set(result, inUse);
+            BIT_SET_WORDS.set(result, trim(serializedWordArray, start, inUse));
+            return result;
+        } catch (final IllegalAccessException ex)
+        {
+            throw new IllegalAccessError(ex.getMessage());
+        }
+    }
+
+    @Private
+    static BitSet fromStorageFormGeneric(final long[] serializedWordArray, int start, int length)
+    {
+        final BitSet result = new BitSet();
+        for (int wordIndex = 0; wordIndex < length; ++wordIndex)
+        {
+            final long word = serializedWordArray[start + wordIndex];
+            for (int bitInWord = 0; bitInWord < BITS_PER_WORD; ++bitInWord)
+            {
+                if ((word & 1L << bitInWord) != 0)
+                {
+                    result.set(wordIndex << ADDRESS_BITS_PER_WORD | bitInWord);
+                }
+            }
+        }
+        return result;
+    }
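+    // Worked example: with BITS_PER_WORD == 64, a set bit 70 is stored as
+    // bit 6 of word 1 (70 >> 6 == 1, 70 & 63 == 6); the generic decoder
+    // above reverses this by scanning every bit of every word.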
+
+    public static long[] toStorageForm(final BitSet[] data, int numberOfWords)
+    {
+        final long[] result = new long[data.length * numberOfWords];
+        int idx = 0;
+        for (BitSet bs : data)
+        {
+            System.arraycopy(toStorageForm(bs, numberOfWords), 0, result, idx, numberOfWords);
+            idx += numberOfWords;
+        }
+        return result;
+    }
+
+    public static long[] toStorageForm(final BitSet data)
+    {
+        if (BIT_SET_WORDS != null)
+        {
+            return toStorageFormFast(data);
+        } else
+        {
+            return toStorageFormGeneric(data);
+        }
+    }
+
+    public static long[] toStorageForm(final BitSet data, int numberOfWords)
+    {
+        if (BIT_SET_WORDS != null)
+        {
+            return toStorageFormFast(data, numberOfWords);
+        } else
+        {
+            return toStorageFormGeneric(data, numberOfWords);
+        }
+    }
+
+    private static long[] toStorageFormFast(final BitSet data)
+    {
+        try
+        {
+            long[] storageForm = (long[]) BIT_SET_WORDS.get(data);
+            int inUse = BIT_SET_WORDS_IN_USE.getInt(data);
+            return trim(storageForm, 0, inUse);
+        } catch (final IllegalAccessException ex)
+        {
+            throw new IllegalAccessError(ex.getMessage());
+        }
+    }
+
+    private static long[] toStorageFormFast(final BitSet data, int numberOfWords)
+    {
+        try
+        {
+            long[] storageForm = (long[]) BIT_SET_WORDS.get(data);
+            return trimEnforceLen(storageForm, 0, numberOfWords);
+        } catch (final IllegalAccessException ex)
+        {
+            throw new IllegalAccessError(ex.getMessage());
+        }
+    }
+
+    private static long[] trim(final long[] array, int start, int len)
+    {
+        final int inUse = calcInUse(array, start, len);
+        if (inUse < array.length)
+        {
+            final long[] trimmedArray = new long[inUse];
+            System.arraycopy(array, start, trimmedArray, 0, inUse);
+            return trimmedArray;
+        }
+        return array;
+    }
+
+    private static long[] trimEnforceLen(final long[] array, int start, int len)
+    {
+        if (len != array.length)
+        {
+            final long[] trimmedArray = new long[len];
+            final int inUse = calcInUse(array, start, len);
+            System.arraycopy(array, start, trimmedArray, 0, inUse);
+            return trimmedArray;
+        }
+        return array;
+    }
+
+    private static int calcInUse(final long[] array, int start, int len)
+    {
+        int result = Math.min(len, array.length);
+        while (result > 0 && array[start + result - 1] == 0)
+        {
+            --result;
+        }
+        return result;
+    }
+
+    /**
+     * Given a bit index return the word index containing it.
+     */
+    public static int getWordIndex(final int bitIndex)
+    {
+        return bitIndex >> ADDRESS_BITS_PER_WORD;
+    }
+
+    /**
+     * Given a bit index, return a unit that masks that bit in its unit.
+     */
+    public static long getBitMaskInWord(final int bitIndex)
+    {
+        return 1L << (bitIndex & BIT_INDEX_MASK);
+    }
+
+    // @Private
+    static long[] toStorageFormGeneric(final BitSet data)
+    {
+        final long[] words = new long[data.size() >> ADDRESS_BITS_PER_WORD];
+        for (int bitIndex = data.nextSetBit(0); bitIndex >= 0; bitIndex =
+                data.nextSetBit(bitIndex + 1))
+        {
+            final int wordIndex = getWordIndex(bitIndex);
+            words[wordIndex] |= getBitMaskInWord(bitIndex);
+        }
+        return words;
+    }
+
+    // @Private
+    static long[] toStorageFormGeneric(final BitSet data, final int numberOfWords)
+    {
+        final long[] words = new long[numberOfWords];
+        for (int bitIndex = data.nextSetBit(0); bitIndex >= 0; bitIndex =
+                data.nextSetBit(bitIndex + 1))
+        {
+            final int wordIndex = getWordIndex(bitIndex);
+            if (wordIndex >= words.length)
+            {
+                break;
+            }
+            words[wordIndex] |= getBitMaskInWord(bitIndex);
+        }
+        return words;
+    }
+
+    static int getMaxLength(BitSet[] data)
+    {
+        int length = 0;
+        for (BitSet bs : data)
+        {
+            length = Math.max(length, bs.length());
+        }
+        return length;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/BuildAndEnvironmentInfo.java b/source/java/ch/systemsx/cisd/hdf5/BuildAndEnvironmentInfo.java
new file mode 100644
index 0000000..f225fb1
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/BuildAndEnvironmentInfo.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.utilities.AbstractBuildAndEnvironmentInfo;
+
+
+/**
+ * The build and environment information for JHDF5.
+ *
+ * @author Franz-Josef Elmer
+ */
+public class BuildAndEnvironmentInfo extends AbstractBuildAndEnvironmentInfo
+{
+    private final static String BASE = "jhdf5";
+    
+    public final static BuildAndEnvironmentInfo INSTANCE = new BuildAndEnvironmentInfo();
+    
+    private BuildAndEnvironmentInfo()
+    {
+        super(BASE);
+    }
+
+    /**
+     * Shows build and environment information on the console.
+     */
+    public static void main(String[] args)
+    {
+        System.out.println(INSTANCE);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/CharacterEncoding.java b/source/java/ch/systemsx/cisd/hdf5/CharacterEncoding.java
new file mode 100644
index 0000000..e4a12a5
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/CharacterEncoding.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_CSET_ASCII;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_CSET_UTF8;
+
+/**
+ * An enum for character encodings of path names and strings in JHDF5. 
+ *
+ * @author Bernd Rinn
+ */
+public enum CharacterEncoding
+{
+    ASCII("ASCII", H5T_CSET_ASCII, 1), 
+    UTF8("UTF8", H5T_CSET_UTF8, 4);
+
+    private final String charSetName;
+    
+    private final int cValue;
+    
+    private final int maxBytesPerChar;
+    
+    private CharacterEncoding(String charSetName, int cValue, int maxBytesPerChar)
+    {
+        this.charSetName = charSetName;
+        this.cValue = cValue;
+        this.maxBytesPerChar = maxBytesPerChar;
+    }
+    
+    int getCValue()
+    {
+        return cValue;
+    }
+    
+    String getCharSetName()
+    {
+        return charSetName;
+    }
+
+    /**
+     * Returns the maximum number of bytes per character.
+     */
+    int getMaxBytesPerChar()
+    {
+        return maxBytesPerChar;
+    }
+
+    static CharacterEncoding fromCValue(int cValue) throws IllegalArgumentException
+    {
+        if (cValue == H5T_CSET_ASCII)
+        {
+            return ASCII;
+        } else if (cValue == H5T_CSET_UTF8)
+        {
+            return UTF8;
+        } else
+        {
+            throw new IllegalArgumentException("Illegal character encoding id " + cValue);
+        }
+    }
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/CompoundElement.java b/source/java/ch/systemsx/cisd/hdf5/CompoundElement.java
new file mode 100644
index 0000000..9501298
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/CompoundElement.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static java.lang.annotation.ElementType.FIELD;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * A marker annotation for fields intended to be mapped to an HDF5 compound data type member. The
+ * annotation is optional for many fields, as otherwise the field's properties are inferred.
+ * However, for arrays, <code>String</code>s and <code>BitSet</code>s the maximum length needs to
+ * be given in {@link #dimensions()}.
+ * 
+ * @author Bernd Rinn
+ */
+@Retention(RUNTIME)
+@Target(FIELD)
+public @interface CompoundElement
+{
+
+    /**
+     * The name of the member in the compound type. Leave empty to use the field name as member
+     * name.
+     */
+    String memberName() default "";
+
+    /**
+     * The name of the type (for Java enumeration types only). Leave empty to use the simple class
+     * name as the type name.
+     */
+    String typeName() default "";
+
+    /**
+     * The length / dimensions of the compound member. Is required for compound members that have a
+     * variable length, e.g. strings or primitive arrays. Ignored for compound members that have a
+     * fixed length, e.g. a float field.
+     */
+    int[] dimensions() default 0;
+    
+    /**
+     * If <code>true</code>, map this integer field to an unsigned integer type. 
+     */
+    boolean unsigned() default false;
+    
+    /**
+     * If <code>true</code>, map this string field to a variable-length string type.
+     */
+    boolean variableLength() default false;
+    
+    /**
+     * If <code>true</code>, map this string field to an HDF5 reference type.
+     */
+    boolean reference() default false;
+
+    /**
+     * The {@link HDF5DataTypeVariant} of this compound element, if any.
+     */
+    HDF5DataTypeVariant typeVariant() default HDF5DataTypeVariant.NONE;
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/CompoundType.java b/source/java/ch/systemsx/cisd/hdf5/CompoundType.java
new file mode 100644
index 0000000..d5ccdc9
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/CompoundType.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static java.lang.annotation.ElementType.TYPE;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * A marker annotation for classes intended to be mapped to an HDF5 compound data type. The
+ * annotation is optional, as the field-to-member mapping can also be inferred when it is not
+ * present. However, this annotation is the only way to specify that not all fields should be
+ * mapped to members but only those annotated with {@link CompoundElement}.
+ * 
+ * @author Bernd Rinn
+ */
+@Retention(RUNTIME)
+@Target(TYPE)
+public @interface CompoundType
+{
+    /**
+     * The name this compound type should have in the HDF5 file. If left blank, the simple class
+     * name will be used.
+     */
+    String name() default "";
+
+    /**
+     * Whether all fields should be mapped to members of the compound type or only the fields
+     * annotated with {@link CompoundElement} (default: <code>true</code>).
+     */
+    boolean mapAllFields() default true;
+}
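+// Illustrative sketch (hypothetical class, not part of the library): mapping
+// a Java class to an HDF5 compound type with the two annotations above:
+//
+//     @CompoundType(name = "Measurement", mapAllFields = false)
+//     class Measurement
+//     {
+//         @CompoundElement(memberName = "temp")
+//         float temperature;
+//
+//         @CompoundElement(dimensions = { 16 })
+//         String comment;
+//     }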
diff --git a/source/java/ch/systemsx/cisd/hdf5/CompoundTypeInformation.java b/source/java/ch/systemsx/cisd/hdf5/CompoundTypeInformation.java
new file mode 100644
index 0000000..17574a8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/CompoundTypeInformation.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * A class with basic information about the types of compound members in an HDF5 file.
+ * 
+ * @author Bernd Rinn
+ */
+final class CompoundTypeInformation
+{
+    final String name;
+
+    final int compoundDataTypeId;
+
+    final int nativeCompoundDataTypeId;
+
+    private final HDF5CompoundMemberInformation[] members;
+
+    final int[] dataTypeIds;
+    
+    final HDF5EnumerationType[] enumTypes;
+
+    final int recordSizeOnDisk;
+    
+    final int recordSizeInMemory;
+    
+    private int numberOfVLMembers;
+
+    CompoundTypeInformation(String name, int compoundDataTypeId, int nativeCompoundDataTypeId,
+            int numberOfElements, int recordSizeOnDisk, int recordSizeInMemory)
+    {
+        this.name = name;
+        this.compoundDataTypeId = compoundDataTypeId;
+        this.nativeCompoundDataTypeId = nativeCompoundDataTypeId;
+        this.members = new HDF5CompoundMemberInformation[numberOfElements];
+        this.dataTypeIds = new int[numberOfElements];
+        this.enumTypes = new HDF5EnumerationType[numberOfElements];
+        this.recordSizeOnDisk = recordSizeOnDisk;
+        this.recordSizeInMemory = recordSizeInMemory;
+    }
+    
+    private void calcNumberOfVLMembers()
+    {
+        int countOfVLMembers = 0;
+        for (HDF5CompoundMemberInformation m : members)
+        {
+            if (m != null && m.getType().isVariableLengthString())
+            {
+                ++countOfVLMembers;
+            }
+        }
+        this.numberOfVLMembers = countOfVLMembers;
+    }
+    
+    int getNumberOfMembers()
+    {
+        return members.length;
+    }
+
+    HDF5CompoundMemberInformation getMember(int i)
+    {
+        return members[i];
+    }
+    
+    HDF5CompoundMemberInformation[] getCopyOfMembers()
+    {
+        return members.clone();
+    }
+    
+    void setMember(int i, HDF5CompoundMemberInformation member)
+    {
+        members[i] = member;
+        calcNumberOfVLMembers();
+    }
+    
+    int getNumberOfVLMembers()
+    {
+        return numberOfVLMembers;
+    }
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/EnumerationType.java b/source/java/ch/systemsx/cisd/hdf5/EnumerationType.java
new file mode 100644
index 0000000..196bbb3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/EnumerationType.java
@@ -0,0 +1,350 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5EnumerationType.EnumStorageForm;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * An enumeration type, consisting of a name and the list of names of the enumeration terms.
+ * 
+ * @author Bernd Rinn
+ */
+public class EnumerationType implements Iterable<String>
+{
+    private final String nameOrNull;
+
+    private final String[] values;
+
+    private final List<String> unmodifiableValues;
+
+    private Map<String, Integer> nameToIndexMap;
+
+    public EnumerationType(String nameOrNull, String[] values)
+    {
+        assert values != null;
+
+        this.nameOrNull = nameOrNull;
+        this.values = values;
+        this.unmodifiableValues = Collections.unmodifiableList(Arrays.asList(values));
+    }
+
+    public EnumerationType(Class<? extends Enum<?>> enumClass)
+    {
+        this(enumClass.getName(), enumClass);
+    }
+    
+    public EnumerationType(String nameOrNull, Class<? extends Enum<?>> enumClass)
+    {
+        this(nameOrNull, ReflectionUtils.getEnumOptions(enumClass));
+    }
+    
+    private Map<String, Integer> getMap()
+    {
+        if (nameToIndexMap == null)
+        {
+            nameToIndexMap = new HashMap<String, Integer>(values.length);
+            for (int i = 0; i < values.length; ++i)
+            {
+                nameToIndexMap.put(values[i], i);
+            }
+        }
+        return nameToIndexMap;
+    }
+
+    String[] getValueArray()
+    {
+        return values;
+    }
+
+    Object createArray(int length)
+    {
+        if (values.length < Byte.MAX_VALUE)
+        {
+            return new byte[length];
+        } else if (values.length < Short.MAX_VALUE)
+        {
+            return new short[length];
+        } else
+        {
+            return new int[length];
+        }
+    }
+
+    /**
+     * Returns the ordinal value for the given string <var>value</var>, if <var>value</var> is a
+     * member of the enumeration, and <code>null</code> otherwise.
+     */
+    public Integer tryGetIndexForValue(String value)
+    {
+        return getMap().get(value);
+    }
+
+    /**
+     * Returns the name of this type, if it exists, or <code>NONAME</code> otherwise.
+     */
+    public String getName()
+    {
+        if (nameOrNull == null)
+        {
+            return "NONAME";
+        } else
+        {
+            return nameOrNull;
+        }
+    }
+    
+    /**
+     * Returns the name of this type, if it exists, or <code>null</code> otherwise.
+     */
+    public String tryGetName()
+    {
+        return nameOrNull;
+    }
+
+    /**
+     * Returns the allowed values of this enumeration type.
+     */
+    public List<String> getValues()
+    {
+        return unmodifiableValues;
+    }
+
+    /**
+     * Returns the {@link EnumStorageForm} of this enumeration type.
+     */
+    public EnumStorageForm getStorageForm()
+    {
+        final int len = values.length;
+        if (len < Byte.MAX_VALUE)
+        {
+            return EnumStorageForm.BYTE;
+        } else if (len < Short.MAX_VALUE)
+        {
+            return EnumStorageForm.SHORT;
+        } else
+        {
+            return EnumStorageForm.INT;
+        }
+    }
+
+    byte getNumberOfBits()
+    {
+        final int n = (values.length > 0) ? values.length - 1 : 0;
+        // Binary search - decision tree (5 tests, rarely 6)
+        return (byte) (n < 1 << 15 ? (n < 1 << 7 ? (n < 1 << 3 ? (n < 1 << 1 ? (n < 1 << 0 ? (n < 0 ? 32
+                : 0)
+                : 1)
+                : (n < 1 << 2 ? 2 : 3))
+                : (n < 1 << 5 ? (n < 1 << 4 ? 4 : 5) : (n < 1 << 6 ? 6 : 7)))
+                : (n < 1 << 11 ? (n < 1 << 9 ? (n < 1 << 8 ? 8 : 9) : (n < 1 << 10 ? 10 : 11))
+                        : (n < 1 << 13 ? (n < 1 << 12 ? 12 : 13) : (n < 1 << 14 ? 14 : 15))))
+                : (n < 1 << 23 ? (n < 1 << 19 ? (n < 1 << 17 ? (n < 1 << 16 ? 16 : 17)
+                        : (n < 1 << 18 ? 18 : 19)) : (n < 1 << 21 ? (n < 1 << 20 ? 20 : 21)
+                        : (n < 1 << 22 ? 22 : 23)))
+                        : (n < 1 << 27 ? (n < 1 << 25 ? (n < 1 << 24 ? 24 : 25) : (n < 1 << 26 ? 26
+                                : 27)) : (n < 1 << 29 ? (n < 1 << 28 ? 28 : 29) : (n < 1 << 30 ? 30
+                                : 31)))));
+    }
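+    // Worked example: for values.length == 100, n == 99 and the decision
+    // tree returns 7, since 2^6 <= 99 < 2^7, i.e. 7 bits suffice to encode
+    // the ordinals 0..99.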
+
+    byte[] toStorageForm(int ordinal)
+    {
+        switch (getStorageForm())
+        {
+            case BYTE:
+                return HDFNativeData.byteToByte((byte) ordinal);
+            case SHORT:
+                return HDFNativeData.shortToByte((short) ordinal);
+            case INT:
+                return HDFNativeData.intToByte(ordinal);
+        }
+        throw new Error("Illegal storage size.");
+    }
+
+    static int fromStorageForm(byte[] data)
+    {
+        if (data.length == 1)
+        {
+            return data[0];
+        } else if (data.length == 2)
+        {
+            return NativeData.byteToShort(data, ByteOrder.NATIVE)[0];
+        } else if (data.length == 4)
+        {
+            return NativeData.byteToInt(data, ByteOrder.NATIVE)[0];
+        }
+        throw new HDF5JavaException("Unexpected size for Enum data type (" + data.length + ")");
+    }
+
+    static int fromStorageForm(byte[] data, int index, int size)
+    {
+        if (size == 1)
+        {
+            return data[index];
+        } else if (size == 2)
+        {
+            return NativeData.byteToShort(data, ByteOrder.NATIVE, size * index, 1)[0];
+        } else if (size == 4)
+        {
+            return NativeData.byteToInt(data, ByteOrder.NATIVE, size * index, 1)[0]; // byte offset, consistent with the SHORT branch above
+        }
+        throw new HDF5JavaException("Unexpected size for Enum data type (" + size + ")");
+    }
+
+    static Object fromStorageForm(byte[] data, EnumStorageForm storageForm)
+    {
+        switch (storageForm)
+        {
+            case BYTE:
+                return data;
+            case SHORT:
+                return NativeData.byteToShort(data, ByteOrder.NATIVE);
+            case INT:
+                return NativeData.byteToInt(data, ByteOrder.NATIVE);
+        }
+        throw new Error("Illegal storage size.");
+    }
+
+    static MDAbstractArray<?> fromStorageForm(byte[] data, long[] dimensions,
+            EnumStorageForm storageForm)
+    {
+        switch (storageForm)
+        {
+            case BYTE:
+                return new MDByteArray(data, dimensions);
+            case SHORT:
+                return new MDShortArray(NativeData.byteToShort(data, ByteOrder.NATIVE), dimensions);
+            case INT:
+                return new MDIntArray(NativeData.byteToInt(data, ByteOrder.NATIVE), dimensions);
+        }
+        throw new Error("Illegal storage size.");
+    }
+
+    static MDAbstractArray<?> fromStorageForm(byte[] data, int[] dimensions,
+            EnumStorageForm storageForm)
+    {
+        switch (storageForm)
+        {
+            case BYTE:
+                return new MDByteArray(data, dimensions);
+            case SHORT:
+                return new MDShortArray(NativeData.byteToShort(data, ByteOrder.NATIVE), dimensions);
+            case INT:
+                return new MDIntArray(NativeData.byteToInt(data, ByteOrder.NATIVE), dimensions);
+        }
+        throw new Error("Illegal storage size.");
+    }
+
+    String createStringFromStorageForm(byte[] data, int offset)
+    {
+        return values[getOrdinalFromStorageForm(data, offset)];
+    }
+
+    int getOrdinalFromStorageForm(byte[] data, int offset)
+    {
+        switch (getStorageForm())
+        {
+            case BYTE:
+                return data[offset];
+            case SHORT:
+                return HDFNativeData.byteToShort(data, offset);
+            case INT:
+                return HDFNativeData.byteToInt(data, offset);
+        }
+        throw new Error("Illegal storage form (" + getStorageForm() + ".)");
+    }
+
+    //
+    // Iterable
+    //
+
+    /**
+     * Returns an {@link Iterator} over all values of this enumeration type.
+     * {@link Iterator#remove()} is not allowed and will throw an
+     * {@link UnsupportedOperationException}.
+     */
+    @Override
+    public Iterator<String> iterator()
+    {
+        return new Iterator<String>()
+            {
+                private int index = 0;
+
+                @Override
+                public boolean hasNext()
+                {
+                    return index < values.length;
+                }
+
+                @Override
+                public String next()
+                {
+                    return values[index++];
+                }
+
+                /**
+                 * @throws UnsupportedOperationException As this iterator doesn't support removal.
+                 */
+                @Override
+                public void remove() throws UnsupportedOperationException
+                {
+                    throw new UnsupportedOperationException();
+                }
+
+            };
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1; // do not use identity-based Object.hashCode(): equal instances must hash equally
+        result = prime * result + Arrays.hashCode(values);
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final EnumerationType other = (EnumerationType) obj;
+        return Arrays.equals(values, other.values);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5.java b/source/java/ch/systemsx/cisd/hdf5/HDF5.java
new file mode 100644
index 0000000..1c093c2
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5.java
@@ -0,0 +1,2065 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5F.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5A.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5GLO.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5P.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5RI.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5S.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5T.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5D_CHUNKED;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5D_COMPACT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5D_FILL_TIME_ALLOC;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_ACC_RDONLY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_ACC_RDWR;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_ACC_TRUNC;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_LIBVER_LATEST;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_SCOPE_GLOBAL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5O_TYPE_GROUP;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_ATTRIBUTE_CREATE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DATASET_CREATE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_FILE_ACCESS;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_GROUP_CREATE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_LINK_CREATE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5R_OBJECT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_MAX_RANK;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SELECT_SET;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_UNLIMITED;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_COMPOUND;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_C_S1;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ENUM;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_FLOAT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_INTEGER;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_OPAQUE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_OPAQUE_TAG_MAX;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_SGN_NONE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I16LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I32LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I8LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STR_NULLPAD;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_VARIABLE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5Z_SO_FLOAT_DSCALE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5Z_SO_INT;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.cleanup.CleanUpCallable;
+import ch.systemsx.cisd.hdf5.cleanup.CleanUpRegistry;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A wrapper around {@link ch.systemsx.cisd.hdf5.hdf5lib.H5General} that handles closing of
+ * resources automatically by means of registering clean-up {@link Runnable}s.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5
+{
+
+    private final static int MAX_PATH_LENGTH = 16384;
+
+    private final CleanUpCallable runner;
+
+    private final int dataSetCreationPropertyListCompactStorageLayoutFileTimeAlloc;
+
+    private final int dataSetCreationPropertyListFillTimeAlloc;
+
+    private final int numericConversionXferPropertyListID;
+
+    private final int lcplCreateIntermediateGroups;
+
+    private final boolean useUTF8CharEncoding;
+
+    private final boolean autoDereference;
+
+    public HDF5(final CleanUpRegistry fileRegistry, final CleanUpCallable runner,
+            final boolean performNumericConversions, final boolean useUTF8CharEncoding,
+            final boolean autoDereference)
+    {
+        this.runner = runner;
+        this.useUTF8CharEncoding = useUTF8CharEncoding;
+        this.autoDereference = autoDereference;
+        this.dataSetCreationPropertyListCompactStorageLayoutFileTimeAlloc =
+                createDataSetCreationPropertyList(fileRegistry);
+        H5Pset_layout(dataSetCreationPropertyListCompactStorageLayoutFileTimeAlloc, H5D_COMPACT);
+        this.dataSetCreationPropertyListFillTimeAlloc =
+                createDataSetCreationPropertyList(fileRegistry);
+        if (performNumericConversions)
+        {
+            this.numericConversionXferPropertyListID =
+                    createDataSetXferPropertyListAbortOverflow(fileRegistry);
+        } else
+        {
+            this.numericConversionXferPropertyListID =
+                    createDataSetXferPropertyListAbort(fileRegistry);
+        }
+        this.lcplCreateIntermediateGroups = createLinkCreationPropertyList(true, fileRegistry);
+    }
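+
+    // A minimal sketch of the clean-up pattern used throughout this class (fileId and the
+    // data set path are hypothetical): work is wrapped in an ICallableWithCleanUp, and every
+    // id registered with the ICleanUpRegistry is closed by the CleanUpCallable when the call
+    // returns or throws.
+    //
+    //   final ICallableWithCleanUp<long[]> readRunnable = new ICallableWithCleanUp<long[]>()
+    //       {
+    //           @Override
+    //           public long[] call(ICleanUpRegistry registry)
+    //           {
+    //               final int dataSetId = openDataSet(fileId, "/myDataSet", registry);
+    //               return getDataDimensions(dataSetId, registry);
+    //           }
+    //       };
+    //   final long[] dimensions = runner.call(readRunnable);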
+
+    private static void checkMaxLength(String path) throws HDF5JavaException
+    {
+        if (path.length() > MAX_PATH_LENGTH)
+        {
+            throw new HDF5JavaException("Path too long (length=" + path.length() + ")");
+        }
+    }
+
+    //
+    // File
+    //
+
+    public int createFile(String fileName, boolean useLatestFormat, ICleanUpRegistry registry)
+    {
+        final int fileAccessPropertyListId =
+                createFileAccessPropertyListId(useLatestFormat, registry);
+        final int fileId =
+                H5Fcreate(fileName, H5F_ACC_TRUNC, H5P_DEFAULT, fileAccessPropertyListId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Fclose(fileId);
+                }
+            });
+        return fileId;
+    }
+
+    private int createFileAccessPropertyListId(boolean enforce_1_8, ICleanUpRegistry registry)
+    {
+        int fileAccessPropertyListId = H5P_DEFAULT;
+        if (enforce_1_8)
+        {
+            final int fapl = H5Pcreate(H5P_FILE_ACCESS);
+            registry.registerCleanUp(new Runnable()
+                {
+                    @Override
+                    public void run()
+                    {
+                        H5Pclose(fapl);
+                    }
+                });
+            H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+            fileAccessPropertyListId = fapl;
+        }
+        return fileAccessPropertyListId;
+    }
+
+    public int openFileReadOnly(String fileName, ICleanUpRegistry registry)
+    {
+        final int fileId = H5Fopen(fileName, H5F_ACC_RDONLY, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Fclose(fileId);
+                }
+            });
+        return fileId;
+    }
+
+    public int openFileReadWrite(String fileName, boolean enforce_1_8, ICleanUpRegistry registry)
+    {
+        final int fileAccessPropertyListId = createFileAccessPropertyListId(enforce_1_8, registry);
+        final File f = new File(fileName);
+        if (f.exists() && f.isFile() == false)
+        {
+            throw new HDF5Exception("An entry with name '" + fileName
+                    + "' exists but is not a file.");
+        }
+        final int fileId = H5Fopen(fileName, H5F_ACC_RDWR, fileAccessPropertyListId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Fclose(fileId);
+                }
+            });
+        return fileId;
+    }
+
+    public void flushFile(int fileId)
+    {
+        H5Fflush(fileId, H5F_SCOPE_GLOBAL);
+    }
+
+    //
+    // Object
+    //
+
+    public int openObject(int fileId, String path, ICleanUpRegistry registry)
+    {
+        checkMaxLength(path);
+        final int objectId =
+                isReference(path) ? H5Rdereference(fileId, Long.parseLong(path.substring(1)))
+                        : H5Oopen(fileId, path, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Oclose(objectId);
+                }
+            });
+        return objectId;
+    }
+
+    public int deleteObject(int fileId, String path)
+    {
+        checkMaxLength(path);
+        final int success = H5Gunlink(fileId, path);
+        return success;
+    }
+
+    public int copyObject(int srcFileId, String srcPath, int dstFileId, String dstPath)
+    {
+        checkMaxLength(srcPath);
+        checkMaxLength(dstPath);
+        final int success =
+                H5Ocopy(srcFileId, srcPath, dstFileId, dstPath, H5P_DEFAULT, H5P_DEFAULT);
+        return success;
+    }
+
+    public int moveLink(int fileId, String srcLinkPath, String dstLinkPath)
+    {
+        checkMaxLength(srcLinkPath);
+        checkMaxLength(dstLinkPath);
+        final int success =
+                H5Lmove(fileId, srcLinkPath, fileId, dstLinkPath, lcplCreateIntermediateGroups,
+                        H5P_DEFAULT);
+        return success;
+    }
+
+    //
+    // Group
+    //
+
+    public void createGroup(int fileId, String groupName)
+    {
+        checkMaxLength(groupName);
+        final int groupId =
+                H5Gcreate(fileId, groupName, lcplCreateIntermediateGroups, H5P_DEFAULT, H5P_DEFAULT);
+        H5Gclose(groupId);
+    }
+
+    public void createOldStyleGroup(int fileId, String groupName, int sizeHint,
+            ICleanUpRegistry registry)
+    {
+        checkMaxLength(groupName);
+        final int gcplId = H5Pcreate(H5P_GROUP_CREATE);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(gcplId);
+                }
+            });
+        H5Pset_local_heap_size_hint(gcplId, sizeHint);
+        final int groupId =
+                H5Gcreate(fileId, groupName, lcplCreateIntermediateGroups, gcplId, H5P_DEFAULT);
+        H5Gclose(groupId);
+    }
+
+    public void createNewStyleGroup(int fileId, String groupName, int maxCompact, int minDense,
+            ICleanUpRegistry registry)
+    {
+        checkMaxLength(groupName);
+        final int gcplId = H5Pcreate(H5P_GROUP_CREATE);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(gcplId);
+                }
+            });
+        H5Pset_link_phase_change(gcplId, maxCompact, minDense);
+        final int groupId =
+                H5Gcreate(fileId, groupName, lcplCreateIntermediateGroups, gcplId, H5P_DEFAULT);
+        H5Gclose(groupId);
+    }
+
+    public int openGroup(int fileId, String path, ICleanUpRegistry registry)
+    {
+        checkMaxLength(path);
+        final int groupId =
+                isReference(path) ? H5Rdereference(fileId, Long.parseLong(path.substring(1)))
+                        : H5Gopen(fileId, path, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Gclose(groupId);
+                }
+            });
+        return groupId;
+    }
+
+    public long getNumberOfGroupMembers(int fileId, String path, ICleanUpRegistry registry)
+    {
+        checkMaxLength(path);
+        final int groupId = H5Gopen(fileId, path, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Gclose(groupId);
+                }
+            });
+        return H5Gget_nlinks(groupId);
+    }
+
+    public boolean existsAttribute(final int objectId, final String attributeName)
+    {
+        checkMaxLength(attributeName);
+        return H5Aexists(objectId, attributeName);
+    }
+
+    public boolean exists(final int fileId, final String linkName)
+    {
+        checkMaxLength(linkName);
+        return H5Lexists(fileId, linkName);
+    }
+
+    public HDF5LinkInformation getLinkInfo(final int fileId, final String objectName,
+            boolean exceptionIfNonExistent)
+    {
+        checkMaxLength(objectName);
+        if ("/".equals(objectName))
+        {
+            return HDF5LinkInformation.ROOT_LINK_INFO;
+        }
+        final String[] lname = new String[1];
+        final int typeId = H5Lget_link_info(fileId, objectName, lname, exceptionIfNonExistent);
+        return HDF5LinkInformation.create(objectName, typeId, lname[0]);
+    }
+
+    public HDF5ObjectType getLinkTypeInfo(final int fileId, final String objectName,
+            boolean exceptionWhenNonExistent)
+    {
+        checkMaxLength(objectName);
+        if ("/".equals(objectName))
+        {
+            return HDF5ObjectType.GROUP;
+        }
+        final int typeId = H5Lget_link_info(fileId, objectName, null, exceptionWhenNonExistent);
+        return HDF5CommonInformation.objectTypeIdToObjectType(typeId);
+    }
+
+    public HDF5ObjectInformation getObjectInfo(final int fileId, final String objectName,
+            boolean exceptionWhenNonExistent)
+    {
+        checkMaxLength(objectName);
+        final long[] info = new long[5];
+        final int typeId = H5Oget_info_by_name(fileId, objectName, info, exceptionWhenNonExistent);
+        return new HDF5ObjectInformation(objectName,
+                HDF5CommonInformation.objectTypeIdToObjectType(typeId), info);
+    }
+
+    public int getObjectTypeId(final int fileId, final String objectName,
+            boolean exceptionWhenNonExistent)
+    {
+        checkMaxLength(objectName);
+        if ("/".equals(objectName))
+        {
+            return H5O_TYPE_GROUP;
+        }
+        return H5Oget_info_by_name(fileId, objectName, null, exceptionWhenNonExistent);
+    }
+
+    public HDF5ObjectType getObjectTypeInfo(final int fileId, final String objectName,
+            boolean exceptionWhenNonExistent)
+    {
+        return HDF5CommonInformation.objectTypeIdToObjectType(getObjectTypeId(fileId, objectName,
+                exceptionWhenNonExistent));
+    }
+
+    public String[] getGroupMembers(final int fileId, final String groupName)
+    {
+        checkMaxLength(groupName);
+        final ICallableWithCleanUp<String[]> memberNamesRunnable =
+                new ICallableWithCleanUp<String[]>()
+                    {
+                        @Override
+                        public String[] call(ICleanUpRegistry registry)
+                        {
+                            final int groupId = openGroup(fileId, groupName, registry);
+                            final long nLong = H5Gget_nlinks(groupId);
+                            final int n = (int) nLong;
+                            if (n != nLong)
+                            {
+                                throw new HDF5JavaException(
+                                        "Number of group members is too large (n=" + nLong + ")");
+                            }
+                            final String[] names = new String[n];
+                            H5Lget_link_names_all(groupId, ".", names);
+                            return names;
+                        }
+                    };
+        return runner.call(memberNamesRunnable);
+    }
+
+    public List<HDF5LinkInformation> getGroupMemberLinkInfo(final int fileId,
+            final String groupName, final boolean includeInternal,
+            final String houseKeepingNameSuffix)
+    {
+        checkMaxLength(groupName);
+        final ICallableWithCleanUp<List<HDF5LinkInformation>> linkInfoRunnable =
+                new ICallableWithCleanUp<List<HDF5LinkInformation>>()
+                    {
+                        @Override
+                        public List<HDF5LinkInformation> call(ICleanUpRegistry registry)
+                        {
+                            final int groupId = openGroup(fileId, groupName, registry);
+                            final long nLong = H5Gget_nlinks(groupId);
+                            final int n = (int) nLong;
+                            if (n != nLong)
+                            {
+                                throw new HDF5JavaException(
+                                        "Number of group members is too large (n=" + nLong + ")");
+                            }
+                            final String[] names = new String[n];
+                            final String[] linkNames = new String[n];
+                            final int[] types = new int[n];
+                            H5Lget_link_info_all(groupId, ".", names, types, linkNames);
+                            final String superGroupName =
+                                    (groupName.equals("/") ? "/" : groupName + "/");
+                            final List<HDF5LinkInformation> info =
+                                    new LinkedList<HDF5LinkInformation>();
+                            for (int i = 0; i < n; ++i)
+                            {
+                                if (includeInternal
+                                        || HDF5Utils.isInternalName(names[i],
+                                                houseKeepingNameSuffix) == false)
+                                {
+                                    info.add(HDF5LinkInformation.create(superGroupName + names[i],
+                                            types[i], linkNames[i]));
+                                }
+                            }
+                            return info;
+                        }
+                    };
+        return runner.call(linkInfoRunnable);
+    }
+
+    public List<HDF5LinkInformation> getGroupMemberTypeInfo(final int fileId,
+            final String groupName, final boolean includeInternal,
+            final String houseKeepingNameSuffix)
+    {
+        checkMaxLength(groupName);
+        final ICallableWithCleanUp<List<HDF5LinkInformation>> typeInfoRunnable =
+                new ICallableWithCleanUp<List<HDF5LinkInformation>>()
+                    {
+                        @Override
+                        public List<HDF5LinkInformation> call(ICleanUpRegistry registry)
+                        {
+                            final int groupId = openGroup(fileId, groupName, registry);
+                            final long nLong = H5Gget_nlinks(groupId);
+                            final int n = (int) nLong;
+                            if (n != nLong)
+                            {
+                                throw new HDF5JavaException(
+                                        "Number of group members is too large (n=" + nLong + ")");
+                            }
+                            final String[] names = new String[n];
+                            final int[] types = new int[n];
+                            H5Lget_link_info_all(groupId, ".", names, types, null);
+                            final String superGroupName =
+                                    (groupName.equals("/") ? "/" : groupName + "/");
+                            final List<HDF5LinkInformation> info =
+                                    new LinkedList<HDF5LinkInformation>();
+                            for (int i = 0; i < n; ++i)
+                            {
+                                if (includeInternal
+                                        || HDF5Utils.isInternalName(names[i],
+                                                houseKeepingNameSuffix) == false)
+                                {
+                                    info.add(HDF5LinkInformation.create(superGroupName + names[i],
+                                            types[i], null));
+                                }
+                            }
+                            return info;
+                        }
+                    };
+        return runner.call(typeInfoRunnable);
+    }
+
+    //
+    // Link
+    //
+
+    public void createHardLink(int fileId, String objectName, String linkName)
+    {
+        checkMaxLength(objectName);
+        checkMaxLength(linkName);
+        H5Lcreate_hard(fileId, objectName, fileId, linkName, lcplCreateIntermediateGroups,
+                H5P_DEFAULT);
+    }
+
+    public void createSoftLink(int fileId, String linkName, String targetPath)
+    {
+        checkMaxLength(linkName);
+        checkMaxLength(targetPath);
+        H5Lcreate_soft(targetPath, fileId, linkName, lcplCreateIntermediateGroups, H5P_DEFAULT);
+    }
+
+    public void createExternalLink(int fileId, String linkName, String targetFileName,
+            String targetPath)
+    {
+        checkMaxLength(linkName);
+        checkMaxLength(targetFileName);
+        checkMaxLength(targetPath);
+        H5Lcreate_external(targetFileName, targetPath, fileId, linkName,
+                lcplCreateIntermediateGroups, H5P_DEFAULT);
+    }
+
+    //
+    // Data Set
+    //
+
+    public void writeStringVL(int dataSetId, int dataTypeId, String[] value)
+    {
+        H5DwriteString(dataSetId, dataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, value);
+    }
+
+    public void writeStringVL(int dataSetId, int dataTypeId, int memorySpaceId, int fileSpaceId,
+            String[] value)
+    {
+        H5DwriteString(dataSetId, dataTypeId, memorySpaceId, fileSpaceId, H5P_DEFAULT, value);
+    }
+
+    public int createDataSet(int fileId, long[] dimensions, long[] chunkSizeOrNull, int dataTypeId,
+            HDF5AbstractStorageFeatures compression, String dataSetName, HDF5StorageLayout layout,
+            FileFormat fileFormat, ICleanUpRegistry registry)
+    {
+        checkMaxLength(dataSetName);
+        final int dataSpaceId =
+                H5Screate_simple(dimensions.length, dimensions,
+                        createMaxDimensions(dimensions, (layout == HDF5StorageLayout.CHUNKED)));
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        final int dataSetCreationPropertyListId;
+        if (layout == HDF5StorageLayout.CHUNKED && chunkSizeOrNull != null)
+        {
+            dataSetCreationPropertyListId = createDataSetCreationPropertyList(registry);
+            setChunkedLayout(dataSetCreationPropertyListId, chunkSizeOrNull);
+            if (compression.isScaling())
+            {
+                compression.checkScalingOK(fileFormat);
+                final int classTypeId = getClassType(dataTypeId);
+                assert compression.isCompatibleWithDataClass(classTypeId);
+                if (classTypeId == H5T_INTEGER)
+                {
+                    H5Pset_scaleoffset(dataSetCreationPropertyListId, H5Z_SO_INT,
+                            compression.getScalingFactor());
+                } else if (classTypeId == H5T_FLOAT)
+                {
+                    H5Pset_scaleoffset(dataSetCreationPropertyListId, H5Z_SO_FLOAT_DSCALE,
+                            compression.getScalingFactor());
+                }
+            }
+            if (compression.isShuffleBeforeDeflate())
+            {
+                setShuffle(dataSetCreationPropertyListId);
+            }
+            if (compression.isDeflating())
+            {
+                setDeflate(dataSetCreationPropertyListId, compression.getDeflateLevel());
+            }
+        } else if (layout == HDF5StorageLayout.COMPACT)
+        {
+            dataSetCreationPropertyListId =
+                    dataSetCreationPropertyListCompactStorageLayoutFileTimeAlloc;
+        } else
+        {
+            dataSetCreationPropertyListId = dataSetCreationPropertyListFillTimeAlloc;
+        }
+        final int dataSetId =
+                H5Dcreate(fileId, dataSetName, dataTypeId, dataSpaceId,
+                        lcplCreateIntermediateGroups, dataSetCreationPropertyListId, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Dclose(dataSetId);
+                }
+            });
+
+        return dataSetId;
+    }
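+
+    // A sketch of a typical createDataSet call for an extendable data set (fileId, the
+    // storage features object, the file format, the registry, and the name and sizes are
+    // assumed to come from the caller and are hypothetical here). With a CHUNKED layout the
+    // max dimensions are set to H5S_UNLIMITED, so the data set can be extended later:
+    //
+    //   final int dataSetId = createDataSet(fileId, new long[] { 100 }, new long[] { 10 },
+    //           H5T_STD_I32LE, features, "/myData", HDF5StorageLayout.CHUNKED, fileFormat,
+    //           registry);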
+
+    private int createDataSetCreationPropertyList(ICleanUpRegistry registry)
+    {
+        final int dataSetCreationPropertyListId = H5Pcreate(H5P_DATASET_CREATE);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(dataSetCreationPropertyListId);
+                }
+            });
+        H5Pset_fill_time(dataSetCreationPropertyListId, H5D_FILL_TIME_ALLOC);
+        return dataSetCreationPropertyListId;
+    }
+
+    /**
+     * Returns the storage layout of the data set: one of COMPACT, CHUNKED or CONTIGUOUS.
+     */
+    public HDF5StorageLayout getLayout(int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataSetCreationPropertyListId = getCreationPropertyList(dataSetId, registry);
+        final int layoutId = H5Pget_layout(dataSetCreationPropertyListId);
+        if (layoutId == H5D_COMPACT)
+        {
+            return HDF5StorageLayout.COMPACT;
+        } else if (layoutId == H5D_CHUNKED)
+        {
+            return HDF5StorageLayout.CHUNKED;
+        } else
+        {
+            return HDF5StorageLayout.CONTIGUOUS;
+        }
+    }
+
+    private int getCreationPropertyList(int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataSetCreationPropertyListId = H5Dget_create_plist(dataSetId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(dataSetCreationPropertyListId);
+                }
+            });
+        return dataSetCreationPropertyListId;
+    }
+
+    private static final long[] createMaxDimensions(long[] dimensions, boolean unlimited)
+    {
+        if (unlimited == false)
+        {
+            return dimensions;
+        }
+        final long[] maxDimensions = new long[dimensions.length];
+        Arrays.fill(maxDimensions, H5S_UNLIMITED);
+        return maxDimensions;
+    }
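+
+    // createMaxDimensions example (hypothetical values): dimensions = { 10, 20 } yields
+    // { 10, 20 } for unlimited == false and { H5S_UNLIMITED, H5S_UNLIMITED } for
+    // unlimited == true.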
+
+    private void setChunkedLayout(int dscpId, long[] chunkSize)
+    {
+        assert dscpId >= 0;
+
+        H5Pset_layout(dscpId, H5D_CHUNKED);
+        H5Pset_chunk(dscpId, chunkSize.length, chunkSize);
+    }
+
+    private void setShuffle(int dscpId)
+    {
+        assert dscpId >= 0;
+
+        H5Pset_shuffle(dscpId);
+    }
+
+    private void setDeflate(int dscpId, int deflateLevel)
+    {
+        assert dscpId >= 0;
+        assert deflateLevel >= 0;
+
+        H5Pset_deflate(dscpId, deflateLevel);
+    }
+
+    public int createScalarDataSet(int fileId, int dataTypeId, String dataSetName,
+            boolean compactLayout, ICleanUpRegistry registry)
+    {
+        checkMaxLength(dataSetName);
+        final int dataSpaceId = H5Screate(H5S_SCALAR);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        final int dataSetId =
+                H5Dcreate(
+                        fileId,
+                        dataSetName,
+                        dataTypeId,
+                        dataSpaceId,
+                        lcplCreateIntermediateGroups,
+                        compactLayout ? dataSetCreationPropertyListCompactStorageLayoutFileTimeAlloc
+                                : dataSetCreationPropertyListFillTimeAlloc, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Dclose(dataSetId);
+                }
+            });
+        return dataSetId;
+    }
+
+    public int openDataSet(int fileId, String path, ICleanUpRegistry registry)
+    {
+        checkMaxLength(path);
+        final int dataSetId =
+                isReference(path) ? H5Rdereference(fileId, Long.parseLong(path.substring(1)))
+                        : H5Dopen(fileId, path, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Dclose(dataSetId);
+                }
+            });
+        return dataSetId;
+    }
+
+    boolean isReference(String path)
+    {
+        return autoDereference && (path.charAt(0) == '\0');
+    }
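+
+    // Object reference "paths" are encoded as a NUL character followed by the decimal value
+    // of the reference, e.g. (hypothetical value) "\0" + 12345; isReference() detects this
+    // encoding (only when autoDereference is enabled), and the open methods then resolve the
+    // path via H5Rdereference instead of a name lookup.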
+
+    /**
+     * @param storageDataTypeId The storage data type id when in overwrite mode, or -1 otherwise.
+     */
+    public int openAndExtendDataSet(int fileId, String path, FileFormat fileFormat,
+            long[] dimensions, int storageDataTypeId, ICleanUpRegistry registry)
+            throws HDF5JavaException
+    {
+        checkMaxLength(path);
+        final boolean overwriteMode = (storageDataTypeId > -1);
+        final int dataSetId =
+                isReference(path) ? H5Rdereference(fileId, Long.parseLong(path.substring(1)))
+                        : H5Dopen(fileId, path, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Dclose(dataSetId);
+                }
+            });
+        final long[] oldDimensions = getDataDimensions(dataSetId, registry);
+        if (Arrays.equals(oldDimensions, dimensions) == false)
+        {
+            final HDF5StorageLayout layout = getLayout(dataSetId, registry);
+            if (layout == HDF5StorageLayout.CHUNKED)
+            {
+                // Safety check: JHDF5 always creates CHUNKED data sets with unlimited max
+                // dimensions, but we may have to work on a file we haven't created.
+                if (areDimensionsInBounds(dataSetId, dimensions, registry))
+                {
+                    final long[] newDimensions =
+                            computeNewDimensions(oldDimensions, dimensions, overwriteMode);
+                    setDataSetExtentChunked(dataSetId, newDimensions);
+                } else
+                {
+                    throw new HDF5JavaException("New data set dimensions are out of bounds.");
+                }
+            } else if (overwriteMode)
+            {
+                throw new HDF5JavaException("Cannot change dimensions on non-extendable data set.");
+            } else
+            {
+                int dataTypeId = getDataTypeForDataSet(dataSetId, registry);
+                if (getClassType(dataTypeId) == H5T_ARRAY)
+                {
+                    throw new HDF5JavaException("Cannot partially overwrite array type.");
+                }
+                if (HDF5Utils.isInBounds(oldDimensions, dimensions) == false)
+                {
+                    throw new HDF5JavaException("New data set dimensions are out of bounds.");
+                }
+            }
+        }
+        return dataSetId;
+    }
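+
+    // Summary of openAndExtendDataSet: for CHUNKED data sets the extent is adjusted, subject
+    // to the max-dimensions bounds check; for non-extendable layouts the requested dimensions
+    // must already lie within the existing extent, and array types cannot be partially
+    // overwritten.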
+
+    private long[] computeNewDimensions(long[] oldDimensions, long[] newDimensions,
+            boolean cutDownExtendIfNecessary)
+    {
+        if (cutDownExtendIfNecessary)
+        {
+            return newDimensions;
+        } else
+        {
+            final long[] newUncutDimensions = new long[oldDimensions.length];
+            for (int i = 0; i < newUncutDimensions.length; ++i)
+            {
+                newUncutDimensions[i] = Math.max(oldDimensions[i], newDimensions[i]);
+            }
+            return newUncutDimensions;
+        }
+    }
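+
+    // computeNewDimensions example (hypothetical values): oldDimensions = { 5 },
+    // newDimensions = { 3 } yields { 3 } if cutDownExtendIfNecessary == true (overwrite
+    // mode) and { 5 } otherwise, since each dimension is then the maximum of old and new.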
+
+    /**
+     * Checks whether the given <var>dimensions</var> are in bounds for <var>dataSetId</var>.
+     */
+    private boolean areDimensionsInBounds(final int dataSetId, final long[] dimensions,
+            ICleanUpRegistry registry)
+    {
+        final long[] maxDimensions = getDataMaxDimensions(dataSetId, registry);
+
+        if (dimensions.length != maxDimensions.length) // Actually an error condition
+        {
+            return false;
+        }
+
+        for (int i = 0; i < dimensions.length; ++i)
+        {
+            if (maxDimensions[i] != H5S_UNLIMITED && dimensions[i] > maxDimensions[i])
+            {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void setDataSetExtentChunked(int dataSetId, long[] dimensions)
+    {
+        assert dataSetId >= 0;
+        assert dimensions != null;
+
+        H5Dset_extent(dataSetId, dimensions);
+    }
+
+    public void readDataSetNonNumeric(int dataSetId, int nativeDataTypeId, byte[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+    }
+
+    public void readDataSetNonNumeric(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, byte[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId, H5P_DEFAULT, data);
+    }
+
+    public void readDataSetString(int dataSetId, int nativeDataTypeId, String[] data)
+    {
+        H5Dread_string(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+    }
+
+    public void readDataSetString(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, String[] data)
+    {
+        H5Dread_string(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId, H5P_DEFAULT, data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, byte[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, numericConversionXferPropertyListID,
+                data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, short[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, numericConversionXferPropertyListID,
+                data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, numericConversionXferPropertyListID,
+                data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, long[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, numericConversionXferPropertyListID,
+                data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, float[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, numericConversionXferPropertyListID,
+                data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, double[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, H5S_ALL, H5S_ALL, numericConversionXferPropertyListID,
+                data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, byte[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId,
+                numericConversionXferPropertyListID, data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, short[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId,
+                numericConversionXferPropertyListID, data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, int[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId,
+                numericConversionXferPropertyListID, data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, long[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId,
+                numericConversionXferPropertyListID, data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, float[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId,
+                numericConversionXferPropertyListID, data);
+    }
+
+    public void readDataSet(int dataSetId, int nativeDataTypeId, int memorySpaceId,
+            int fileSpaceId, double[] data)
+    {
+        H5Dread(dataSetId, nativeDataTypeId, memorySpaceId, fileSpaceId,
+                numericConversionXferPropertyListID, data);
+    }
+
+    public void readDataSetVL(int dataSetId, int dataTypeId, String[] data)
+    {
+        H5DreadVL(dataSetId, dataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+        replaceNullWithEmptyString(data);
+    }
+
+    public void readDataSetVL(int dataSetId, int dataTypeId, int memorySpaceId, int fileSpaceId,
+            String[] data)
+    {
+        H5DreadVL(dataSetId, dataTypeId, memorySpaceId, fileSpaceId, H5P_DEFAULT, data);
+        replaceNullWithEmptyString(data);
+    }
+
+    // A fixed-length string array returns uninitialized strings as "", while a
+    // variable-length string array returns them as null. We don't want the application
+    // programmer to have to be aware of this difference, thus we replace null with "" here.
+    private void replaceNullWithEmptyString(String[] data)
+    {
+        for (int i = 0; i < data.length; ++i)
+        {
+            if (data[i] == null)
+            {
+                data[i] = "";
+            }
+        }
+    }
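+
+    // replaceNullWithEmptyString example (hypothetical values): a variable-length read that
+    // produced { "abc", null } is normalized to { "abc", "" }, which is what the equivalent
+    // fixed-length read would have returned.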
+
+    //
+    // Attribute
+    //
+
+    public int createAttribute(int locationId, String attributeName, int dataTypeId,
+            int dataSpaceIdOrMinusOne, ICleanUpRegistry registry)
+    {
+        checkMaxLength(attributeName);
+        final int dataSpaceId =
+                (dataSpaceIdOrMinusOne == -1) ? H5Screate(H5S_SCALAR) : dataSpaceIdOrMinusOne;
+        if (dataSpaceIdOrMinusOne == -1)
+        {
+            registry.registerCleanUp(new Runnable()
+                {
+                    @Override
+                    public void run()
+                    {
+                        H5Sclose(dataSpaceId);
+                    }
+                });
+        }
+        final int attCreationPlistId;
+        if (useUTF8CharEncoding)
+        {
+            attCreationPlistId = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+            setCharacterEncodingCreationPropertyList(attCreationPlistId, CharacterEncoding.UTF8);
+        } else
+        {
+            attCreationPlistId = H5P_DEFAULT;
+        }
+        final int attributeId =
+                H5Acreate(locationId, attributeName, dataTypeId, dataSpaceId, attCreationPlistId,
+                        H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Aclose(attributeId);
+                }
+            });
+        return attributeId;
+    }
+
+    public int deleteAttribute(int locationId, String attributeName)
+    {
+        checkMaxLength(attributeName);
+        final int success = H5Adelete(locationId, attributeName);
+        return success;
+    }
+
+    public int openAttribute(int locationId, String attributeName, ICleanUpRegistry registry)
+    {
+        checkMaxLength(attributeName);
+        final int attributeId = H5Aopen_name(locationId, attributeName);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Aclose(attributeId);
+                }
+            });
+        return attributeId;
+    }
+
+    public List<String> getAttributeNames(int locationId, ICleanUpRegistry registry)
+    {
+        final int numberOfAttributes = H5Aget_num_attrs(locationId);
+        final List<String> attributeNames = new LinkedList<String>();
+        for (int i = 0; i < numberOfAttributes; ++i)
+        {
+            final int attributeId = H5Aopen_idx(locationId, i);
+            registry.registerCleanUp(new Runnable()
+                {
+                    @Override
+                    public void run()
+                    {
+                        H5Aclose(attributeId);
+                    }
+                });
+            final String[] nameContainer = new String[1];
+            // Find out length of attribute name.
+            final long nameLength = H5Aget_name(attributeId, 0L, null);
+            // Read the attribute name.
+            final long nameLengthRead = H5Aget_name(attributeId, nameLength + 1, nameContainer);
+            if (nameLengthRead != nameLength)
+            {
+                throw new HDF5JavaException(String.format(
+                        "Error reading attribute name [wrong name length "
+                                + "when reading attribute %d, expected: %d, found: %d]", i,
+                        nameLength, nameLengthRead));
+            }
+            attributeNames.add(nameContainer[0]);
+        }
+        return attributeNames;
+    }
+
+    public byte[] readAttributeAsByteArray(int attributeId, int dataTypeId, int length)
+    {
+        final byte[] data = new byte[length];
+        H5Aread(attributeId, dataTypeId, data);
+        return data;
+    }
+
+    public short[] readAttributeAsShortArray(int attributeId, int dataTypeId, int length)
+    {
+        final short[] data = new short[length];
+        H5Aread(attributeId, dataTypeId, data);
+        return data;
+    }
+
+    public int[] readAttributeAsIntArray(int attributeId, int dataTypeId, int length)
+    {
+        final int[] data = new int[length];
+        H5Aread(attributeId, dataTypeId, data);
+        return data;
+    }
+
+    public long[] readAttributeAsLongArray(int attributeId, int dataTypeId, int length)
+    {
+        final long[] data = new long[length];
+        H5Aread(attributeId, dataTypeId, data);
+        return data;
+    }
+
+    public float[] readAttributeAsFloatArray(int attributeId, int dataTypeId, int length)
+    {
+        final float[] data = new float[length];
+        H5Aread(attributeId, dataTypeId, data);
+        return data;
+    }
+
+    public double[] readAttributeAsDoubleArray(int attributeId, int dataTypeId, int length)
+    {
+        final double[] data = new double[length];
+        H5Aread(attributeId, dataTypeId, data);
+        return data;
+    }
+
+    public void readAttributeVL(int attributeId, int dataTypeId, String[] data)
+    {
+        H5AreadVL(attributeId, dataTypeId, data);
+    }
+
+    public void writeAttribute(int attributeId, int dataTypeId, byte[] value)
+    {
+        H5Awrite(attributeId, dataTypeId, value);
+    }
+
+    public void writeAttribute(int attributeId, int dataTypeId, short[] value)
+    {
+        H5Awrite(attributeId, dataTypeId, value);
+    }
+
+    public void writeAttribute(int attributeId, int dataTypeId, int[] value)
+    {
+        H5Awrite(attributeId, dataTypeId, value);
+    }
+
+    public void writeAttribute(int attributeId, int dataTypeId, long[] value)
+    {
+        H5Awrite(attributeId, dataTypeId, value);
+    }
+
+    public void writeAttribute(int attributeId, int dataTypeId, float[] value)
+    {
+        H5Awrite(attributeId, dataTypeId, value);
+    }
+
+    public void writeAttribute(int attributeId, int dataTypeId, double[] value)
+    {
+        H5Awrite(attributeId, dataTypeId, value);
+    }
+
+    public void writeAttributeStringVL(int attributeId, int dataTypeId, String[] value)
+    {
+        H5AwriteString(attributeId, dataTypeId, value);
+    }
+
+    //
+    // Data Type
+    //
+
+    public int copyDataType(int dataTypeId, ICleanUpRegistry registry)
+    {
+        final int copiedDataTypeId = H5Tcopy(dataTypeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(copiedDataTypeId);
+                }
+            });
+        return copiedDataTypeId;
+    }
+
+    public int createDataTypeVariableString(ICleanUpRegistry registry)
+    {
+        final int dataTypeId = createDataTypeStringVariableLength();
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        if (useUTF8CharEncoding)
+        {
+            setCharacterEncodingDataType(dataTypeId, CharacterEncoding.UTF8);
+        }
+        return dataTypeId;
+    }
+
+    private int createDataTypeStringVariableLength()
+    {
+        int dataTypeId = H5Tcopy(H5T_C_S1);
+        H5Tset_size(dataTypeId, H5T_VARIABLE);
+        return dataTypeId;
+    }
+
+    public int createDataTypeString(int length, ICleanUpRegistry registry)
+    {
+        assert length > 0;
+
+        final int dataTypeId = H5Tcopy(H5T_C_S1);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        H5Tset_size(dataTypeId, length);
+        H5Tset_strpad(dataTypeId, H5T_STR_NULLPAD);
+        if (useUTF8CharEncoding)
+        {
+            setCharacterEncodingDataType(dataTypeId, CharacterEncoding.UTF8);
+        }
+        return dataTypeId;
+    }
+
+    private void setCharacterEncodingDataType(int dataTypeId, CharacterEncoding encoding)
+    {
+        H5Tset_cset(dataTypeId, encoding.getCValue());
+    }
+
+    public int createArrayType(int baseTypeId, int length, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Tarray_create(baseTypeId, 1, new int[]
+            { length });
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return dataTypeId;
+    }
+
+    public int createArrayType(int baseTypeId, int[] dimensions, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Tarray_create(baseTypeId, dimensions.length, dimensions);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return dataTypeId;
+    }
+
+    private enum EnumSize
+    {
+        BYTE8, SHORT16, INT32
+    }
+
+    public int createDataTypeEnum(String[] names, ICleanUpRegistry registry)
+    {
+        for (String name : names)
+        {
+            checkMaxLength(name);
+        }
+        final EnumSize size =
+                (names.length < Byte.MAX_VALUE) ? EnumSize.BYTE8
+                        : (names.length < Short.MAX_VALUE) ? EnumSize.SHORT16 : EnumSize.INT32;
+        final int baseDataTypeId;
+        switch (size)
+        {
+            case BYTE8:
+                baseDataTypeId = H5T_STD_I8LE;
+                break;
+            case SHORT16:
+                baseDataTypeId = H5T_STD_I16LE;
+                break;
+            case INT32:
+                baseDataTypeId = H5T_STD_I32LE;
+                break;
+            default:
+                throw new InternalError();
+        }
+        final int dataTypeId = H5Tenum_create(baseDataTypeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        switch (size)
+        {
+            case BYTE8:
+                for (byte i = 0; i < names.length; ++i)
+                {
+                    insertMemberEnum(dataTypeId, names[i], i);
+                }
+                break;
+            case SHORT16:
+            {
+                final short[] values = getLittleEndianSuccessiveShortValues(names);
+                for (short i = 0; i < names.length; ++i)
+                {
+                    insertMemberEnum(dataTypeId, names[i], values[i]);
+                }
+                break;
+            }
+            case INT32:
+            {
+                final int[] values = getLittleEndianSuccessiveIntValues(names);
+                for (int i = 0; i < names.length; ++i)
+                {
+                    insertMemberEnum(dataTypeId, names[i], values[i]);
+                }
+                break;
+            }
+        }
+        return dataTypeId;
+    }
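+
+    // createDataTypeEnum sizing examples (hypothetical member counts): 100 names fit into
+    // BYTE8 (H5T_STD_I8LE), 1000 names require SHORT16 (H5T_STD_I16LE) and 100000 names
+    // require INT32 (H5T_STD_I32LE); the member values are the consecutive ordinals 0..n-1,
+    // stored little-endian.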
+
+    private short[] getLittleEndianSuccessiveShortValues(String[] names)
+    {
+        final short[] values = new short[names.length];
+        for (short i = 0; i < names.length; ++i)
+        {
+            values[i] = i;
+        }
+        H5Tconvert_to_little_endian(values);
+        return values;
+    }
+
+    private int[] getLittleEndianSuccessiveIntValues(String[] names)
+    {
+        final int[] values = new int[names.length];
+        for (int i = 0; i < names.length; ++i)
+        {
+            values[i] = i;
+        }
+        H5Tconvert_to_little_endian(values);
+        return values;
+    }
+
+    private void insertMemberEnum(int dataTypeId, String name, byte value)
+    {
+        assert dataTypeId >= 0;
+        assert name != null;
+
+        H5Tenum_insert(dataTypeId, name, value);
+    }
+
+    private void insertMemberEnum(int dataTypeId, String name, short value)
+    {
+        assert dataTypeId >= 0;
+        assert name != null;
+
+        H5Tenum_insert(dataTypeId, name, value);
+    }
+
+    private void insertMemberEnum(int dataTypeId, String name, int value)
+    {
+        assert dataTypeId >= 0;
+        assert name != null;
+
+        H5Tenum_insert(dataTypeId, name, value);
+    }
+
+    /** Returns the number of members of an enum type or a compound type. */
+    public int getNumberOfMembers(int dataTypeId)
+    {
+        return H5Tget_nmembers(dataTypeId);
+    }
+
+    /**
+     * Returns the name of an enum value or compound member for the given <var>index</var>.
+     * <p>
+     * Must not be called on a <var>dataTypeId</var> that is not an enum or compound type.
+     */
+    public String getNameForEnumOrCompoundMemberIndex(int dataTypeId, int index)
+    {
+        return H5Tget_member_name(dataTypeId, index);
+    }
+
+    /**
+     * Returns the offset of a compound member for the given <var>index</var>.
+     * <p>
+     * Must not be called on a <var>dataTypeId</var> that is not a compound type.
+     */
+    public int getOffsetForCompoundMemberIndex(int dataTypeId, int index)
+    {
+        return (int) H5Tget_member_offset(dataTypeId, index);
+    }
+
+    /**
+     * Returns the names of the enum values or compound members.
+     * <p>
+     * Must not be called on a <var>dataTypeId</var> that is not an enum or compound type.
+     */
+    public String[] getNamesForEnumOrCompoundMembers(int dataTypeId)
+    {
+        final int len = getNumberOfMembers(dataTypeId);
+        final String[] values = new String[len];
+        for (int i = 0; i < len; ++i)
+        {
+            values[i] = H5Tget_member_name(dataTypeId, i);
+        }
+        return values;
+    }
+
+    /**
+     * Returns the index of an enum value or compound member for the given <var>name</var>. Works on
+     * enum and compound data types.
+     */
+    public int getIndexForMemberName(int dataTypeId, String name)
+    {
+        checkMaxLength(name);
+        return H5Tget_member_index(dataTypeId, name);
+    }
+
+    /**
+     * Returns the data type id for a member of a compound data type, specified by index.
+     */
+    public int getDataTypeForIndex(int compoundDataTypeId, int index, ICleanUpRegistry registry)
+    {
+        final int memberTypeId = H5Tget_member_type(compoundDataTypeId, index);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(memberTypeId);
+                }
+            });
+        return memberTypeId;
+    }
+
+    /**
+     * Returns the data type id for a member of a compound data type, specified by name.
+     */
+    public int getDataTypeForMemberName(int compoundDataTypeId, String memberName)
+    {
+        checkMaxLength(memberName);
+        final int index = H5Tget_member_index(compoundDataTypeId, memberName);
+        return H5Tget_member_type(compoundDataTypeId, index);
+    }
+
+    public Boolean tryGetBooleanValue(final int dataTypeId, final int intValue)
+    {
+        if (getClassType(dataTypeId) != H5T_ENUM)
+        {
+            return null;
+        }
+        final String value = getNameForEnumOrCompoundMemberIndex(dataTypeId, intValue);
+        if ("TRUE".equalsIgnoreCase(value))
+        {
+            return true;
+        } else if ("FALSE".equalsIgnoreCase(value))
+        {
+            return false;
+        } else
+        {
+            return null;
+        }
+    }
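+
+    // tryGetBooleanValue assumes booleans are stored as an enum type whose member names are
+    // "TRUE" and "FALSE" (compared case-insensitively): the name at index intValue decides
+    // the result, and any other name, or a non-enum type, yields null.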
+
+    public int createDataTypeCompound(int lengthInBytes, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Tcreate(H5T_COMPOUND, lengthInBytes);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return dataTypeId;
+    }
+
+    public int createDataTypeOpaque(int lengthInBytes, String tag, ICleanUpRegistry registry)
+    {
+        checkMaxLength(tag);
+        final int dataTypeId = H5Tcreate(H5T_OPAQUE, lengthInBytes);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        H5Tset_tag(dataTypeId,
+                tag.length() > H5T_OPAQUE_TAG_MAX ? tag.substring(0, H5T_OPAQUE_TAG_MAX) : tag);
+        return dataTypeId;
+    }
+
+    public void commitDataType(int fileId, String name, int dataTypeId)
+    {
+        checkMaxLength(name);
+        H5Tcommit(fileId, name, dataTypeId, lcplCreateIntermediateGroups, H5P_DEFAULT, H5P_DEFAULT);
+    }
+
+    public int openDataType(int fileId, String name, ICleanUpRegistry registry)
+    {
+        checkMaxLength(name);
+        final int dataTypeId =
+                isReference(name) ? H5Rdereference(fileId, Long.parseLong(name.substring(1)))
+                        : H5Topen(fileId, name, H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return dataTypeId;
+    }
+
+    public boolean dataTypesAreEqual(int dataTypeId1, int dataTypeId2)
+    {
+        return H5Tequal(dataTypeId1, dataTypeId2);
+    }
+
+    public int getDataTypeForDataSet(int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Dget_type(dataSetId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return dataTypeId;
+    }
+
+    public int getDataTypeForAttribute(int attributeId, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Aget_type(attributeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return dataTypeId;
+    }
+
+    public String tryGetOpaqueTag(int dataTypeId)
+    {
+        return H5Tget_tag(dataTypeId);
+    }
+
+    public int getNativeDataType(int dataTypeId, ICleanUpRegistry registry)
+    {
+        final int nativeDataTypeId = H5Tget_native_type(dataTypeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(nativeDataTypeId);
+                }
+            });
+        return nativeDataTypeId;
+    }
+
+    public int getNativeDataTypeForDataSet(int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Dget_type(dataSetId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return getNativeDataType(dataTypeId, registry);
+    }
+
+    public int getNativeDataTypeForAttribute(int attributeId, ICleanUpRegistry registry)
+    {
+        final int dataTypeId = H5Aget_type(attributeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(dataTypeId);
+                }
+            });
+        return getNativeDataType(dataTypeId, registry);
+    }
+
+    public int getDataTypeSize(int dataTypeId)
+    {
+        return H5Tget_size(dataTypeId);
+    }
+
+    public long getDataTypeSizeLong(int dataTypeId) throws HDF5JavaException
+    {
+        return H5Tget_size_long(dataTypeId);
+    }
+
+    public boolean isVariableLengthString(int dataTypeId)
+    {
+        return H5Tis_variable_str(dataTypeId);
+    }
+
+    public int getClassType(int dataTypeId)
+    {
+        return H5Tget_class(dataTypeId);
+    }
+
+    public CharacterEncoding getCharacterEncoding(int dataTypeId)
+    {
+        final int cValue = H5Tget_cset(dataTypeId);
+        if (cValue == CharacterEncoding.ASCII.getCValue())
+        {
+            return CharacterEncoding.ASCII;
+        } else if (cValue == CharacterEncoding.UTF8.getCValue())
+        {
+            return CharacterEncoding.UTF8;
+        } else
+        {
+            throw new HDF5JavaException("Unknown character encoding cValue " + cValue);
+        }
+    }
+
+    public boolean hasClassType(int dataTypeId, int classTypeId)
+    {
+        return H5Tdetect_class(dataTypeId, classTypeId);
+    }
+
+    public int getBaseDataType(int dataTypeId, ICleanUpRegistry registry)
+    {
+        final int baseDataTypeId = H5Tget_super(dataTypeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Tclose(baseDataTypeId);
+                }
+            });
+        return baseDataTypeId;
+    }
+
+    public boolean getSigned(int dataTypeId)
+    {
+        return H5Tget_sign(dataTypeId) != H5T_SGN_NONE;
+    }
+
+    public String tryGetDataTypePath(int dataTypeId)
+    {
+        if (dataTypeId < 0 || H5Tcommitted(dataTypeId) == false)
+        {
+            return null;
+        }
+        final String[] result = new String[1];
+        final long len = H5Iget_name(dataTypeId, result, 64);
+        if (len >= result[0].length())
+        {
+            H5Iget_name(dataTypeId, result, len + 1);
+        }
+        return result[0];
+    }
+
+    /**
+     * Reclaims the variable-length data structures from a compound buffer, if any.
+     */
+    public void reclaimCompoundVL(HDF5CompoundType<?> type, byte[] buf)
+    {
+        int[] vlMemberIndices = type.getObjectByteifyer().getVLMemberIndices();
+        if (vlMemberIndices.length > 0) // This type has variable-length data members
+        {
+            HDFNativeData.freeCompoundVLStr(buf, type.getRecordSizeInMemory(), vlMemberIndices);
+        }
+    }
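+
+    // A sketch of the intended call sequence for reclaimCompoundVL (variable names are
+    // hypothetical): after reading a compound type with variable-length members into a
+    // byte[] buffer, the native strings referenced from the buffer must be freed to avoid a
+    // memory leak:
+    //
+    //   readDataSet(dataSetId, nativeDataTypeId, buf);
+    //   // ... copy the data out of buf ...
+    //   reclaimCompoundVL(compoundType, buf);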
+
+    //
+    // Data Space
+    //
+
+    public int getDataSpaceForDataSet(int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataSpaceId = H5Dget_space(dataSetId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        return dataSpaceId;
+
+    public long[] getDataDimensionsForAttribute(final int attributeId, ICleanUpRegistry registry)
+    {
+        final int dataSpaceId = H5Aget_space(attributeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        final long[] dimensions = getDataSpaceDimensions(dataSpaceId);
+        return dimensions;
+    }
+
+    public long[] getDataDimensions(final int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataSpaceId = H5Dget_space(dataSetId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        long[] dimensions = getDataSpaceDimensions(dataSpaceId);
+        // Ensure backward compatibility with 8.10
+        if (HDF5Utils.mightBeEmptyInStorage(dimensions)
+                && existsAttribute(dataSetId, HDF5Utils.DATASET_IS_EMPTY_LEGACY_ATTRIBUTE))
+        {
+            dimensions = new long[dimensions.length];
+        }
+        return dimensions;
+    }
+
+    public long[] getDataMaxDimensions(final int dataSetId)
+    {
+        ICallableWithCleanUp<long[]> dataDimensionRunnable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    return getDataMaxDimensions(dataSetId, registry);
+                }
+
+            };
+        return runner.call(dataDimensionRunnable);
+    }
+
+    private long[] getDataMaxDimensions(final int dataSetId, ICleanUpRegistry registry)
+    {
+        final int dataSpaceId = H5Dget_space(dataSetId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        final long[] dimensions = getDataSpaceMaxDimensions(dataSpaceId);
+        return dimensions;
+    }
+
+    public int getDataSpaceRank(int dataSpaceId)
+    {
+        return H5Sget_simple_extent_ndims(dataSpaceId);
+    }
+
+    public long[] getDataSpaceDimensions(int dataSpaceId)
+    {
+        final int rank = H5Sget_simple_extent_ndims(dataSpaceId);
+        return getDataSpaceDimensions(dataSpaceId, rank);
+    }
+
+    public long[] getDataSpaceDimensions(int dataSpaceId, int rank)
+    {
+        assert dataSpaceId >= 0;
+        assert rank >= 0;
+
+        final long[] dimensions = new long[rank];
+        H5Sget_simple_extent_dims(dataSpaceId, dimensions, null);
+        return dimensions;
+    }
+
+    public long[] getDataSpaceMaxDimensions(int dataSpaceId)
+    {
+        final int rank = H5Sget_simple_extent_ndims(dataSpaceId);
+        return getDataSpaceMaxDimensions(dataSpaceId, rank);
+    }
+
+    public long[] getDataSpaceMaxDimensions(int dataSpaceId, int rank)
+    {
+        assert dataSpaceId >= 0;
+        assert rank >= 0;
+
+        final long[] maxDimensions = new long[rank];
+        H5Sget_simple_extent_dims(dataSpaceId, null, maxDimensions);
+        return maxDimensions;
+    }
+
+    /**
+     * @param dataSetOrAttributeId The id of either the data set or the attribute to get the rank
+     *            for.
+     * @param isAttribute If <code>true</code>, <var>dataSetOrAttributeId</var> will be interpreted
+     *            as an attribute, otherwise as a data set.
+     */
+    public int getRank(final int dataSetOrAttributeId, final boolean isAttribute,
+            ICleanUpRegistry registry)
+    {
+        final int dataSpaceId =
+                isAttribute ? H5Aget_space(dataSetOrAttributeId)
+                        : H5Dget_space(dataSetOrAttributeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        return H5Sget_simple_extent_ndims(dataSpaceId);
+    }
+
+    /**
+     * @param dataSetOrAttributeId The id of either the data set or the attribute to get the
+     *            dimensions for.
+     * @param isAttribute If <code>true</code>, <var>dataSetOrAttributeId</var> will be interpreted
+     *            as an attribute, otherwise as a data set.
+     */
+    public long[] getDimensions(final int dataSetOrAttributeId, final boolean isAttribute,
+            ICleanUpRegistry registry)
+    {
+        final int dataSpaceId =
+                isAttribute ? H5Aget_space(dataSetOrAttributeId)
+                        : H5Dget_space(dataSetOrAttributeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
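+        // Allocate for the maximum possible rank; H5Sget_simple_extent_dims returns the
+        // actual rank, which is then used to trim the result array.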
+        final long[] dimensions = new long[H5S_MAX_RANK];
+        final int rank = H5Sget_simple_extent_dims(dataSpaceId, dimensions, null);
+        final long[] realDimensions = new long[rank];
+        System.arraycopy(dimensions, 0, realDimensions, 0, rank);
+        return realDimensions;
+    }
+
+    /**
+     * @param dataSetOrAttributeId The id of either the data set or the attribute to get the
+     *            dimensions for.
+     * @param isAttribute If <code>true</code>, <var>dataSetOrAttributeId</var> will be interpreted
+     *            as an attribute, otherwise as a data set.
+     * @param dataSetInfo The info object to fill.
+     */
+    public void fillDataDimensions(final int dataSetOrAttributeId, final boolean isAttribute,
+            final HDF5DataSetInformation dataSetInfo, ICleanUpRegistry registry)
+    {
+        final int dataSpaceId =
+                isAttribute ? H5Aget_space(dataSetOrAttributeId)
+                        : H5Dget_space(dataSetOrAttributeId);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        final long[] dimensions = new long[H5S_MAX_RANK];
+        final long[] maxDimensions = new long[H5S_MAX_RANK];
+        final int rank = H5Sget_simple_extent_dims(dataSpaceId, dimensions, maxDimensions);
+        final long[] realDimensions = new long[rank];
+        System.arraycopy(dimensions, 0, realDimensions, 0, rank);
+        final long[] realMaxDimensions = new long[rank];
+        System.arraycopy(maxDimensions, 0, realMaxDimensions, 0, rank);
+        dataSetInfo.setDimensions(realDimensions);
+        dataSetInfo.setMaxDimensions(realMaxDimensions);
+        if (isAttribute == false)
+        {
+            final long[] chunkSizes = new long[rank];
+            final int creationPropertyList =
+                    getCreationPropertyList(dataSetOrAttributeId, registry);
+            final HDF5StorageLayout layout =
+                    HDF5StorageLayout.fromId(H5Pget_layout(creationPropertyList));
+            dataSetInfo.setStorageLayout(layout);
+            if (layout == HDF5StorageLayout.CHUNKED)
+            {
+                H5Pget_chunk(creationPropertyList, rank, chunkSizes);
+                dataSetInfo.setChunkSizes(MDAbstractArray.toInt(chunkSizes));
+            }
+        }
+    }
+
+    public int[] getArrayDimensions(int arrayTypeId)
+    {
+        final int rank = H5Tget_array_ndims(arrayTypeId);
+        final int[] dims = new int[rank];
+        H5Tget_array_dims(arrayTypeId, dims);
+        return dims;
+    }
+
+    public int createScalarDataSpace()
+    {
+        return H5Screate(H5S_SCALAR);
+    }
+
+    public int createSimpleDataSpace(long[] dimensions, ICleanUpRegistry registry)
+    {
+        final int dataSpaceId = H5Screate_simple(dimensions.length, dimensions, null);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Sclose(dataSpaceId);
+                }
+            });
+        return dataSpaceId;
+    }
+
+    public void setHyperslabBlock(int dataSpaceId, long[] start, long[] count)
+    {
+        assert dataSpaceId >= 0;
+        assert start != null;
+        assert count != null;
+
+        H5Sselect_hyperslab(dataSpaceId, H5S_SELECT_SET, start, null, count, null);
+    }
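+
+    // A minimal sketch of what a block selection looks like (illustrative values):
+    //
+    //   // select elements [100, 120) of a rank-1 data space
+    //   setHyperslabBlock(dataSpaceId, new long[] { 100 }, new long[] { 20 });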
+
+    //
+    // Properties
+    //
+
+    private int createLinkCreationPropertyList(boolean createIntermediateGroups,
+            ICleanUpRegistry registry)
+    {
+        final int linkCreationPropertyList = H5Pcreate(H5P_LINK_CREATE);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(linkCreationPropertyList);
+                }
+            });
+        if (createIntermediateGroups)
+        {
+            H5Pset_create_intermediate_group(linkCreationPropertyList, true);
+        }
+        if (useUTF8CharEncoding)
+        {
+            setCharacterEncodingCreationPropertyList(linkCreationPropertyList,
+                    CharacterEncoding.UTF8);
+        }
+        return linkCreationPropertyList;
+    }
+
+    // Only use with H5P_LINK_CREATE, H5P_ATTRIBUTE_CREATE and H5P_STRING_CREATE property list ids
+    private void setCharacterEncodingCreationPropertyList(int creationPropertyList,
+            CharacterEncoding encoding)
+    {
+        H5Pset_char_encoding(creationPropertyList, encoding.getCValue());
+    }
+
+    private int createDataSetXferPropertyListAbortOverflow(ICleanUpRegistry registry)
+    {
+        final int datasetXferPropertyList = H5Pcreate_xfer_abort_overflow();
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(datasetXferPropertyList);
+                }
+            });
+        return datasetXferPropertyList;
+    }
+
+    private int createDataSetXferPropertyListAbort(ICleanUpRegistry registry)
+    {
+        final int datasetXferPropertyList = H5Pcreate_xfer_abort();
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Pclose(datasetXferPropertyList);
+                }
+            });
+        return datasetXferPropertyList;
+    }
+
+    //
+    // References
+    //
+
+    String getReferencedObjectName(int objectId, byte[] reference)
+    {
+        return H5Rget_name(objectId, H5R_OBJECT, reference);
+    }
+
+    String getReferencedObjectName(int objectId, long reference)
+    {
+        return H5Rget_name(objectId, reference);
+    }
+
+    String[] getReferencedObjectNames(int objectId, long[] reference)
+    {
+        return H5Rget_name(objectId, reference);
+    }
+
+    String getReferencedObjectName(int objectId, byte[] references, int ofs)
+    {
+        final byte[] reference = new byte[HDF5BaseReader.REFERENCE_SIZE_IN_BYTES];
+        System.arraycopy(references, ofs, reference, 0, HDF5BaseReader.REFERENCE_SIZE_IN_BYTES);
+        return H5Rget_name(objectId, H5R_OBJECT, reference);
+    }
+
+    byte[] createObjectReference(int fileId, String objectPath)
+    {
+        return H5Rcreate(fileId, objectPath, H5R_OBJECT, -1);
+    }
+
+    long[] createObjectReferences(int fileId, String[] objectPaths)
+    {
+        return H5Rcreate(fileId, objectPaths);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5AbstractStorageFeatures.java b/source/java/ch/systemsx/cisd/hdf5/HDF5AbstractStorageFeatures.java
new file mode 100644
index 0000000..755b0bd
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5AbstractStorageFeatures.java
@@ -0,0 +1,378 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * An object representing the storage features that are to be used for a data set.
+ * <p>
+ * One of the storage layouts {@link HDF5StorageLayout#COMPACT},
+ * {@link HDF5StorageLayout#CONTIGUOUS} or {@link HDF5StorageLayout#CHUNKED} can be chosen. Only
+ * {@link HDF5StorageLayout#CHUNKED} is extendable and can be compressed.
+ * <p>
+ * Two types of compressions are supported: <i>deflation</i> (the method used by <code>gzip</code>)
+ * and <i>scaling</i>, which can be used if the accuracy of the values is smaller than what the
+ * atomic data type can store. Note that <i>scaling</i> is in general a lossy compression, while
+ * <i>deflation</i> is always lossless. <i>Scaling</i> compression is only available with HDF5 1.8
+ * and newer. Trying to use <i>scaling</i> in strict HDF5 1.6 compatibility mode will throw an
+ * {@link IllegalStateException}.
+ * <p>
+ * For <i>deflation</i> the deflation level can be chosen to get the right balance between speed of
+ * compression and compression ratio. Often the {@link #DEFAULT_DEFLATION_LEVEL} will be the right
+ * choice.
+ * <p>
+ * For <i>scaling</i>, a scaling factor can be chosen that determines the accuracy of the values
+ * saved. What exactly the scaling factor means differs between float and integer values.
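+ * <p>
+ * A minimal sketch of selecting features through one of the concrete builders (the builder
+ * entry point shown here is illustrative; each subclass provides its own):
+ * 
+ * <pre>
+ * HDF5AbstractStorageFeatures features =
+ *         someConcreteBuilder().chunkedStorageLayout().compress().shuffleBeforeDeflate()
+ *                 .features();
+ * </pre>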
+ * 
+ * @author Bernd Rinn
+ */
+abstract class HDF5AbstractStorageFeatures
+{
+    /**
+     * A constant that specifies that no deflation should be used.
+     */
+    public final static byte NO_DEFLATION_LEVEL = 0;
+
+    /**
+     * A constant that specifies the default deflation level (gzip compression).
+     */
+    public final static byte DEFAULT_DEFLATION_LEVEL = 6;
+
+    /**
+     * A constant that specifies the maximal deflation level (gzip compression).
+     */
+    public final static byte MAX_DEFLATION_LEVEL = 9;
+
+    /**
+     * The policy on how to deal with write access to existing datasets. "Keeping the dataset"
+     * means overwriting the content of the existing dataset, while "replacing the dataset" means
+     * deleting the existing dataset and creating a new one.
+     */
+    public enum DataSetReplacementPolicy
+    {
+        /** Use the default behavior as specified when the writer was created. */
+        USE_WRITER_DEFAULT,
+        /** Enforce to keep the existing dataset, overwriting the writer's default. */
+        ENFORCE_KEEP_EXISTING,
+        /** Enforce to replace the existing dataset, overwriting the writer's default. */
+        ENFORCE_REPLACE_WITH_NEW
+    }
+
+    /**
+     * Do not perform any scaling on the data.
+     */
+    final static byte NO_SCALING_FACTOR = -1;
+
+    static byte toByte(int i)
+    {
+        final byte b = (byte) i;
+        if (b != i)
+        {
+            throw new HDF5JavaException("Value " + i + " cannot be casted to type byte");
+        }
+        return b;
+    }
+
+    private final byte deflateLevel;
+
+    private final byte scalingFactor;
+
+    private final DataSetReplacementPolicy datasetReplacementPolicy;
+
+    private final HDF5StorageLayout proposedLayoutOrNull;
+
+    private final boolean shuffleBeforeDeflate;
+
+    public abstract static class HDF5AbstractStorageFeatureBuilder
+    {
+        private byte deflateLevel;
+
+        private byte scalingFactor;
+
+        private HDF5StorageLayout storageLayout;
+
+        private DataSetReplacementPolicy datasetReplacementPolicy =
+                DataSetReplacementPolicy.USE_WRITER_DEFAULT;
+
+        private boolean shuffleBeforeDeflate;
+
+        HDF5AbstractStorageFeatureBuilder()
+        {
+        }
+
+        public HDF5AbstractStorageFeatureBuilder(HDF5AbstractStorageFeatures template)
+        {
+            deflateLevel(template.getDeflateLevel());
+            scalingFactor(template.getScalingFactor());
+            storageLayout(template.tryGetProposedLayout());
+            datasetReplacementPolicy(template.getDatasetReplacementPolicy());
+            shuffleBeforeDeflate(template.isShuffleBeforeDeflate());
+        }
+
+        byte getDeflateLevel()
+        {
+            return deflateLevel;
+        }
+
+        byte getScalingFactor()
+        {
+            return scalingFactor;
+        }
+
+        HDF5StorageLayout getStorageLayout()
+        {
+            return storageLayout;
+        }
+
+        DataSetReplacementPolicy getDatasetReplacementPolicy()
+        {
+            return datasetReplacementPolicy;
+        }
+
+        boolean isShuffleBeforeDeflate()
+        {
+            return shuffleBeforeDeflate;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder compress(boolean compress)
+        {
+            this.deflateLevel = compress ? DEFAULT_DEFLATION_LEVEL : NO_DEFLATION_LEVEL;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder compress()
+        {
+            this.deflateLevel = DEFAULT_DEFLATION_LEVEL;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder deflateLevel(@SuppressWarnings("hiding")
+        byte deflateLevel)
+        {
+            this.deflateLevel = deflateLevel;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder scalingFactor(@SuppressWarnings("hiding")
+        byte scalingFactor)
+        {
+            this.scalingFactor = scalingFactor;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder noScaling()
+        {
+            this.scalingFactor = NO_SCALING_FACTOR;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder shuffleBeforeDeflate(@SuppressWarnings("hiding")
+        boolean shuffleBeforeDeflate)
+        {
+            this.shuffleBeforeDeflate = shuffleBeforeDeflate;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder shuffleBeforeDeflate()
+        {
+            this.shuffleBeforeDeflate = true;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder noShuffleBeforeDeflate()
+        {
+            this.shuffleBeforeDeflate = false;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder storageLayout(@SuppressWarnings("hiding")
+        HDF5StorageLayout storageLayout)
+        {
+            this.storageLayout = storageLayout;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder compactStorageLayout()
+        {
+            this.storageLayout = HDF5StorageLayout.COMPACT;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder contiguousStorageLayout()
+        {
+            this.storageLayout = HDF5StorageLayout.CONTIGUOUS;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder chunkedStorageLayout()
+        {
+            this.storageLayout = HDF5StorageLayout.CHUNKED;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder defaultStorageLayout()
+        {
+            this.storageLayout = null;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder datasetReplacementPolicy(
+                @SuppressWarnings("hiding")
+                DataSetReplacementPolicy datasetReplacementPolicy)
+        {
+            this.datasetReplacementPolicy = datasetReplacementPolicy;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder datasetReplacementUseWriterDefault()
+        {
+            this.datasetReplacementPolicy = DataSetReplacementPolicy.USE_WRITER_DEFAULT;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder datasetReplacementEnforceKeepExisting()
+        {
+            this.datasetReplacementPolicy = DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING;
+            return this;
+        }
+
+        public HDF5AbstractStorageFeatureBuilder datasetReplacementEnforceReplaceWithNew()
+        {
+            this.datasetReplacementPolicy = DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW;
+            return this;
+        }
+
+        public abstract HDF5AbstractStorageFeatures features();
+    }
+
+    HDF5AbstractStorageFeatures(final HDF5StorageLayout proposedLayoutOrNull,
+            final DataSetReplacementPolicy datasetReplacementPolicy, final byte deflateLevel,
+            final byte scalingFactor)
+    {
+        this(proposedLayoutOrNull, datasetReplacementPolicy, false, deflateLevel, scalingFactor);
+    }
+
+    HDF5AbstractStorageFeatures(final HDF5StorageLayout proposedLayoutOrNull,
+            final DataSetReplacementPolicy datasetReplacementPolicy,
+            final boolean shuffleBeforeDeflate, final byte deflateLevel, final byte scalingFactor)
+    {
+        if (deflateLevel < 0)
+        {
+            throw new IllegalArgumentException("Invalid deflateLevel " + deflateLevel);
+        }
+        this.proposedLayoutOrNull = proposedLayoutOrNull;
+        this.datasetReplacementPolicy = datasetReplacementPolicy;
+        this.shuffleBeforeDeflate = shuffleBeforeDeflate;
+        this.deflateLevel = deflateLevel;
+        this.scalingFactor = scalingFactor;
+    }
+
+    /**
+     * Returns <code>true</code> if this compression setting can be applied to the given
+     * <var>dataClassId</var>.
+     */
+    abstract boolean isCompatibleWithDataClass(int dataClassId);
+
+    /**
+     * Returns the proposed storage layout, or <code>null</code>, if no particular storage layout
+     * should be proposed.
+     */
+    public HDF5StorageLayout tryGetProposedLayout()
+    {
+        return proposedLayoutOrNull;
+    }
+
+    /**
+     * Returns the policy of this storage feature object regarding replacing or keeping already
+     * existing datasets.
+     */
+    public DataSetReplacementPolicy getDatasetReplacementPolicy()
+    {
+        return datasetReplacementPolicy;
+    }
+
+    boolean requiresChunking()
+    {
+        return isDeflating() || isScaling() || proposedLayoutOrNull == HDF5StorageLayout.CHUNKED;
+    }
+
+    boolean allowsCompact()
+    {
+        return proposedLayoutOrNull == null || proposedLayoutOrNull == HDF5StorageLayout.COMPACT;
+    }
+
+    /**
+     * Returns <code>true</code>, if this storage feature object deflates data.
+     */
+    public boolean isDeflating()
+    {
+        return (deflateLevel != NO_DEFLATION_LEVEL);
+    }
+
+    /**
+     * Returns <code>true</code>, if this storage feature object scales data.
+     */
+    public boolean isScaling()
+    {
+        return scalingFactor >= 0;
+    }
+
+    void checkScalingOK(FileFormat fileFormat) throws IllegalStateException
+    {
+        if (fileFormat.isHDF5_1_8_OK() == false)
+        {
+            throw new IllegalStateException(
+                    "Scaling compression is not allowed in strict HDF5 1.6.x compatibility mode.");
+        }
+    }
+
+    /**
+     * Returns <code>true</code>, if this storage feature object performs shuffling before deflating
+     * the data.
+     */
+    public boolean isShuffleBeforeDeflate()
+    {
+        return shuffleBeforeDeflate;
+    }
+
+    /**
+     * Returns the deflate level of this storage feature object. A value of 0 means no deflation.
+     */
+    public byte getDeflateLevel()
+    {
+        return deflateLevel;
+    }
+
+    /**
+     * Returns the scaling factor of this storage feature object. -1 means no scaling, 0 means
+     * auto-scaling.
+     */
+    public byte getScalingFactor()
+    {
+        return scalingFactor;
+    }
+
+    static DataSetReplacementPolicy getDataSetReplacementPolicy(boolean keepDataSetIfExists,
+            boolean deleteDataSetIfExists)
+    {
+        return keepDataSetIfExists ? DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING
+                : (deleteDataSetIfExists ? DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW
+                        : DataSetReplacementPolicy.USE_WRITER_DEFAULT);
+    }
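+
+    // The mapping implemented above (keepDataSetIfExists takes precedence):
+    //   (true,  *)     -> ENFORCE_KEEP_EXISTING
+    //   (false, true)  -> ENFORCE_REPLACE_WITH_NEW
+    //   (false, false) -> USE_WRITER_DEFAULT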
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5BaseReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5BaseReader.java
new file mode 100644
index 0000000..3799a13
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5BaseReader.java
@@ -0,0 +1,1665 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createAttributeTypeVariantAttributeName;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createObjectTypeVariantAttributeName;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getBooleanDataTypePath;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getDataTypeGroup;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getOneDimensionalArraySize;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getTypeVariantDataTypePath;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getTypeVariantMembersAttributeName;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getVariableLengthStringDataTypePath;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.removeInternalNames;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ENUM;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT32;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STRING;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5FileNotFoundException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.cleanup.CleanUpCallable;
+import ch.systemsx.cisd.hdf5.cleanup.CleanUpRegistry;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * Class that provides base methods for reading HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5BaseReader
+{
+
+    /** State that this reader / writer is currently in. */
+    protected enum State
+    {
+        CONFIG, OPEN, CLOSED
+    }
+
+    /** The size of a reference in bytes. */
+    static final int REFERENCE_SIZE_IN_BYTES = 8;
+
+    protected final File hdf5File;
+
+    protected final CleanUpCallable runner;
+
+    protected final CleanUpRegistry fileRegistry;
+
+    protected final boolean performNumericConversions;
+
+    /** Map from named data types to ids. */
+    private final Map<String, Integer> namedDataTypeMap;
+
+    private class DataTypeContainer
+    {
+        final int typeId;
+
+        final String typePath;
+
+        DataTypeContainer(int typeId, String typePath)
+        {
+            this.typeId = typeId;
+            this.typePath = typePath;
+        }
+    }
+
+    private final List<DataTypeContainer> namedDataTypeList;
+
+    protected final HDF5 h5;
+
+    protected final int fileId;
+
+    protected final int booleanDataTypeId;
+
+    protected final int variableLengthStringDataTypeId;
+
+    protected final HDF5EnumerationType typeVariantDataType;
+
+    protected State state;
+
+    final String houseKeepingNameSuffix;
+
+    final CharacterEncoding encodingForNewDataSets;
+
+    HDF5BaseReader(File hdf5File, boolean performNumericConversions, boolean autoDereference,
+            FileFormat fileFormat, boolean overwrite, String preferredHouseKeepingNameSuffix)
+    {
+        this(hdf5File, performNumericConversions, false, autoDereference, fileFormat, overwrite,
+                preferredHouseKeepingNameSuffix);
+    }
+
+    HDF5BaseReader(File hdf5File, boolean performNumericConversions, boolean useUTF8CharEncoding,
+            boolean autoDereference, FileFormat fileFormat, boolean overwrite,
+            String preferredHouseKeepingNameSuffix)
+    {
+        assert hdf5File != null;
+        assert preferredHouseKeepingNameSuffix != null;
+
+        this.performNumericConversions = performNumericConversions;
+        this.hdf5File = hdf5File.getAbsoluteFile();
+        this.runner = new CleanUpCallable();
+        this.fileRegistry = CleanUpRegistry.createSynchonized();
+        this.namedDataTypeMap = new HashMap<String, Integer>();
+        this.namedDataTypeList = new ArrayList<DataTypeContainer>();
+        this.encodingForNewDataSets =
+                useUTF8CharEncoding ? CharacterEncoding.UTF8 : CharacterEncoding.ASCII;
+        this.h5 =
+                new HDF5(fileRegistry, runner, performNumericConversions, useUTF8CharEncoding,
+                        autoDereference);
+        this.fileId = openFile(fileFormat, overwrite);
+        this.state = State.OPEN;
+
+        final String houseKeepingNameSuffixFromFileOrNull = tryGetHouseKeepingNameSuffix();
+        this.houseKeepingNameSuffix =
+                (houseKeepingNameSuffixFromFileOrNull == null) ? preferredHouseKeepingNameSuffix
+                        : houseKeepingNameSuffixFromFileOrNull;
+        readNamedDataTypes();
+        variableLengthStringDataTypeId = openOrCreateVLStringType();
+        booleanDataTypeId = openOrCreateBooleanDataType();
+        typeVariantDataType = openOrCreateTypeVariantDataType();
+    }
+
+    void copyObject(String srcPath, int dstFileId, String dstPath)
+    {
+        final boolean dstIsDir = dstPath.endsWith("/");
+        if (dstIsDir && h5.exists(dstFileId, dstPath) == false)
+        {
+            h5.createGroup(dstFileId, dstPath);
+        }
+        if ("/".equals(srcPath))
+        {
+            final String dstDir = dstIsDir ? dstPath : dstPath + "/";
+            for (String object : getGroupMembers("/"))
+            {
+                h5.copyObject(fileId, object, dstFileId, dstDir + object);
+            }
+        } else if (dstIsDir)
+        {
+            final int idx = srcPath.lastIndexOf('/');
+            final String sourceObjectName = srcPath.substring(idx < 0 ? 0 : idx);
+            h5.copyObject(fileId, srcPath, dstFileId, dstPath + sourceObjectName);
+        } else
+        {
+            h5.copyObject(fileId, srcPath, dstFileId, dstPath);
+        }
+    }
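+
+    // How the destination path is resolved (illustrative paths; the destination file
+    // id is assumed to be open):
+    //   copyObject("/", dstFileId, "/backup/")    -> copies every root member into /backup
+    //   copyObject("/a/ds", dstFileId, "/g/")     -> copies to /g/ds
+    //   copyObject("/a/ds", dstFileId, "/g/ds2")  -> copies to /g/ds2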
+
+    int openFile(FileFormat fileFormat, boolean overwrite)
+    {
+        if (hdf5File.exists() == false)
+        {
+            throw new HDF5FileNotFoundException(hdf5File, "Path does not exit.");
+        }
+        if (hdf5File.canRead() == false)
+        {
+            throw new HDF5FileNotFoundException(hdf5File, "Path is not readable.");
+        }
+        if (hdf5File.isFile() == false)
+        {
+            throw new HDF5FileNotFoundException(hdf5File, "Path is not a file.");
+        }
+        if (HDF5Factory.isHDF5File(hdf5File) == false)
+        {
+            throw new HDF5FileNotFoundException(hdf5File, "Path is not a valid HDF5 file.");
+        }
+        return h5.openFileReadOnly(hdf5File.getPath(), fileRegistry);
+    }
+
+    void checkOpen() throws HDF5JavaException
+    {
+        if (state != State.OPEN)
+        {
+            final String msg =
+                    "HDF5 file '" + hdf5File.getPath() + "' is "
+                            + (state == State.CLOSED ? "closed." : "not opened yet.");
+            throw new HDF5JavaException(msg);
+        }
+    }
+
+    /**
+     * Closes this object and the file referenced by this object. This object must not be used after
+     * being closed.
+     */
+    void close()
+    {
+        synchronized (fileRegistry)
+        {
+            if (state == State.OPEN)
+            {
+                fileRegistry.cleanUp(false);
+            }
+            state = State.CLOSED;
+        }
+    }
+
+    boolean isClosed()
+    {
+        return state == State.CLOSED;
+    }
+
+    String tryGetHouseKeepingNameSuffix()
+    {
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int objectId = h5.openObject(fileId, "/", registry);
+                    if (h5.existsAttribute(objectId,
+                            HDF5Utils.HOUSEKEEPING_NAME_SUFFIX_ATTRIBUTE_NAME))
+                    {
+                        final int suffixLen =
+                                getHousekeepingAttributeExplicitStringLength(objectId, registry);
+                        final boolean explicitLengthStored = (suffixLen >= 0);
+                        final String rawSuffix =
+                                getStringAttribute(objectId, "/",
+                                        HDF5Utils.HOUSEKEEPING_NAME_SUFFIX_ATTRIBUTE_NAME,
+                                        explicitLengthStored, registry);
+                        return explicitLengthStored ? rawSuffix.substring(0, suffixLen) : rawSuffix;
+                    } else
+                    {
+                        return null;
+                    }
+                }
+            };
+        return runner.call(readRunnable);
+    }
+
+    byte[] getAttributeAsByteArray(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId = h5.openAttribute(objectId, attributeName, registry);
+        final int nativeDataTypeId = h5.getNativeDataTypeForAttribute(attributeId, registry);
+        final int dataClass = h5.getClassType(nativeDataTypeId);
+        final int size;
+        if (dataClass == H5T_ARRAY)
+        {
+            final int numberOfElements =
+                    MDAbstractArray.getLength(h5.getArrayDimensions(nativeDataTypeId));
+            final int baseDataType = h5.getBaseDataType(nativeDataTypeId, registry);
+            final int elementSize = h5.getDataTypeSize(baseDataType);
+            size = numberOfElements * elementSize;
+        } else if (dataClass == H5T_STRING)
+        {
+            final int stringDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            size = h5.getDataTypeSize(stringDataTypeId);
+            if (h5.isVariableLengthString(stringDataTypeId))
+            {
+                String[] data = new String[1];
+                h5.readAttributeVL(attributeId, stringDataTypeId, data);
+                return data[0].getBytes();
+            }
+        } else
+        {
+            final int numberOfElements =
+                    MDAbstractArray.getLength(h5.getDataDimensionsForAttribute(attributeId,
+                            registry));
+            final int elementSize = h5.getDataTypeSize(nativeDataTypeId);
+            size = numberOfElements * elementSize;
+        }
+        return h5.readAttributeAsByteArray(attributeId, nativeDataTypeId, size);
+    }
+
+    int openOrCreateBooleanDataType()
+    {
+        final String booleanDataTypePath = getBooleanDataTypePath(houseKeepingNameSuffix);
+        int dataTypeId = getDataTypeId(booleanDataTypePath);
+        if (dataTypeId < 0)
+        {
+            dataTypeId = createBooleanDataType();
+            commitDataType(booleanDataTypePath, dataTypeId);
+        }
+        return dataTypeId;
+    }
+
+    String tryGetDataTypePath(int dataTypeId)
+    {
+        for (DataTypeContainer namedDataType : namedDataTypeList)
+        {
+            if (h5.dataTypesAreEqual(dataTypeId, namedDataType.typeId))
+            {
+                return namedDataType.typePath;
+            }
+        }
+        return h5.tryGetDataTypePath(dataTypeId);
+    }
+
+    void renameNamedDataType(String oldPath, String newPath)
+    {
+        final Integer typeIdOrNull = namedDataTypeMap.remove(oldPath);
+        if (typeIdOrNull != null)
+        {
+            namedDataTypeMap.put(newPath, typeIdOrNull);
+        }
+        for (int i = 0; i < namedDataTypeList.size(); ++i)
+        {
+            final DataTypeContainer c = namedDataTypeList.get(i);
+            if (c.typePath.equals(oldPath))
+            {
+                namedDataTypeList.set(i, new DataTypeContainer(c.typeId, newPath));
+            }
+        }
+    }
+
+    String tryGetDataTypeName(int dataTypeId, HDF5DataClass dataClass)
+    {
+        final String dataTypePathOrNull = tryGetDataTypePath(dataTypeId);
+        return HDF5Utils.tryGetDataTypeNameFromPath(dataTypePathOrNull, houseKeepingNameSuffix,
+                dataClass);
+    }
+
+    int getDataTypeId(final String dataTypePath)
+    {
+        final Integer dataTypeIdOrNull = namedDataTypeMap.get(dataTypePath);
+        if (dataTypeIdOrNull == null)
+        {
+            // Just in case data types were added to groups other than HDF5Utils.DATATYPE_GROUP
+            if (h5.exists(fileId, dataTypePath))
+            {
+                final int dataTypeId = h5.openDataType(fileId, dataTypePath, fileRegistry);
+                namedDataTypeMap.put(dataTypePath, dataTypeId);
+                return dataTypeId;
+            } else
+            {
+                return -1;
+            }
+        } else
+        {
+            return dataTypeIdOrNull;
+        }
+    }
+
+    int createBooleanDataType()
+    {
+        return h5.createDataTypeEnum(new String[]
+            { "FALSE", "TRUE" }, fileRegistry);
+    }
+
+    HDF5EnumerationType openOrCreateTypeVariantDataType()
+    {
+        final String typeVariantTypePath = getTypeVariantDataTypePath(houseKeepingNameSuffix);
+        int dataTypeId = getDataTypeId(typeVariantTypePath);
+        if (dataTypeId < 0)
+        {
+            return createTypeVariantDataType();
+        }
+        final int nativeDataTypeId = h5.getNativeDataType(dataTypeId, fileRegistry);
+        final String[] typeVariantNames = h5.getNamesForEnumOrCompoundMembers(dataTypeId);
+        return new HDF5EnumerationType(fileId, dataTypeId, nativeDataTypeId, typeVariantTypePath,
+                typeVariantNames, this);
+    }
+
+    HDF5EnumerationType createTypeVariantDataType()
+    {
+        final HDF5DataTypeVariant[] typeVariants = HDF5DataTypeVariant.values();
+        final String[] typeVariantNames = new String[typeVariants.length];
+        for (int i = 0; i < typeVariants.length; ++i)
+        {
+            typeVariantNames[i] = typeVariants[i].name();
+        }
+        final int dataTypeId = h5.createDataTypeEnum(typeVariantNames, fileRegistry);
+        final int nativeDataTypeId = h5.getNativeDataType(dataTypeId, fileRegistry);
+        return new HDF5EnumerationType(fileId, dataTypeId, nativeDataTypeId,
+                getTypeVariantDataTypePath(houseKeepingNameSuffix), typeVariantNames, this);
+    }
+
+    void readNamedDataTypes()
+    {
+        final String typeGroup = getDataTypeGroup(houseKeepingNameSuffix);
+        if (h5.exists(fileId, typeGroup) == false)
+        {
+            return;
+        }
+        readNamedDataTypes(typeGroup);
+    }
+
+    private void readNamedDataTypes(String dataTypePath)
+    {
+        for (String dataTypeSubPath : getGroupMemberPaths(dataTypePath))
+        {
+            final HDF5ObjectType type = h5.getObjectTypeInfo(fileId, dataTypeSubPath, false);
+            if (HDF5ObjectType.isGroup(type))
+            {
+                readNamedDataTypes(dataTypeSubPath);
+            } else if (HDF5ObjectType.isDataType(type))
+            {
+                final int dataTypeId = h5.openDataType(fileId, dataTypeSubPath, fileRegistry);
+                namedDataTypeMap.put(dataTypeSubPath, dataTypeId);
+                namedDataTypeList.add(new DataTypeContainer(dataTypeId, dataTypeSubPath));
+            }
+        }
+    }
+
+    void commitDataType(final String dataTypePath, final int dataTypeId)
+    {
+        // Overwrite this method in writer.
+    }
+
+    /**
+     * Class to store the parameters of a data space block: the memory and file space
+     * selections, the effective block size and the data set dimensions.
+     */
+    static class DataSpaceParameters
+    {
+        final int memorySpaceId;
+
+        final int dataSpaceId;
+
+        final int blockSize;
+
+        final long[] dimensions;
+
+        DataSpaceParameters(int memorySpaceId, int dataSpaceId, int blockSize, long[] dimensions)
+        {
+            this.memorySpaceId = memorySpaceId;
+            this.dataSpaceId = dataSpaceId;
+            this.blockSize = blockSize;
+            this.dimensions = dimensions;
+        }
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for the given <var>dataSetId</var>.
+     */
+    DataSpaceParameters getSpaceParameters(final int dataSetId, ICleanUpRegistry registry)
+    {
+        long[] dimensions = h5.getDataDimensions(dataSetId, registry);
+        return new DataSpaceParameters(H5S_ALL, H5S_ALL, MDAbstractArray.getLength(dimensions),
+                dimensions);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a 1d block of the given <var>dataSetId</var>, or
+     * <code>null</code>, if the offset is outside of the dataset and
+     * <code>nullWhenOutside == true</code>.
+     */
+    DataSpaceParameters tryGetSpaceParameters(final int dataSetId, final long offset,
+            final int blockSize, boolean nullWhenOutside, ICleanUpRegistry registry)
+    {
+        return tryGetSpaceParameters(dataSetId, 0, offset, blockSize, nullWhenOutside, registry);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a 1d block of the given <var>dataSetId</var>.
+     */
+    DataSpaceParameters getSpaceParameters(final int dataSetId, final long offset,
+            final int blockSize, ICleanUpRegistry registry)
+    {
+        return tryGetSpaceParameters(dataSetId, 0, offset, blockSize, false, registry);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a 1d block of the given <var>dataSetId</var>.
+     */
+    DataSpaceParameters getSpaceParameters(final int dataSetId, final long memoryOffset,
+            final long offset, final int blockSize, ICleanUpRegistry registry)
+    {
+        return tryGetSpaceParameters(dataSetId, memoryOffset, offset, blockSize, false, registry);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a 1d block of the given <var>dataSetId</var>.
+     */
+    DataSpaceParameters tryGetSpaceParameters(final int dataSetId, final long memoryOffset,
+            final long offset, final int blockSize, boolean nullWhenOutside,
+            ICleanUpRegistry registry)
+    {
+        final int memorySpaceId;
+        final int dataSpaceId;
+        final int effectiveBlockSize;
+        final long[] dimensions;
+        if (blockSize > 0)
+        {
+            dataSpaceId = h5.getDataSpaceForDataSet(dataSetId, registry);
+            dimensions = h5.getDataSpaceDimensions(dataSpaceId);
+            if (dimensions.length != 1)
+            {
+                throw new HDF5JavaException("Data Set is expected to be of rank 1 (rank="
+                        + dimensions.length + ")");
+            }
+            final long size = dimensions[0];
+            final long maxFileBlockSize = size - offset;
+            if (maxFileBlockSize <= 0)
+            {
+                if (nullWhenOutside)
+                {
+                    return null;
+                }
+                throw new HDF5JavaException("Offset " + offset + " >= Size " + size);
+            }
+            final long maxMemoryBlockSize = size - memoryOffset;
+            if (maxMemoryBlockSize <= 0)
+            {
+                if (nullWhenOutside)
+                {
+                    return null;
+                }
+                throw new HDF5JavaException("Memory offset " + memoryOffset + " >= Size " + size);
+            }
+            effectiveBlockSize =
+                    (int) Math.min(blockSize, Math.min(maxMemoryBlockSize, maxFileBlockSize));
+            final long[] blockShape = new long[]
+                { effectiveBlockSize };
+            h5.setHyperslabBlock(dataSpaceId, new long[]
+                { offset }, blockShape);
+            memorySpaceId = h5.createSimpleDataSpace(blockShape, registry);
+            h5.setHyperslabBlock(memorySpaceId, new long[]
+                { memoryOffset }, blockShape);
+        } else
+        {
+            memorySpaceId = HDF5Constants.H5S_ALL;
+            dataSpaceId = HDF5Constants.H5S_ALL;
+            dimensions = h5.getDataDimensions(dataSetId, registry);
+            effectiveBlockSize = getOneDimensionalArraySize(dimensions);
+        }
+        return new DataSpaceParameters(memorySpaceId, dataSpaceId, effectiveBlockSize, dimensions);
+    }
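+
+    // A minimal sketch of a 1d block read built on these parameters (the H5Dread call
+    // is illustrative; the actual read calls live in the typed reader classes):
+    //
+    //   final DataSpaceParameters params =
+    //           getSpaceParameters(dataSetId, offset, blockSize, registry);
+    //   final float[] block = new float[params.blockSize];
+    //   H5Dread(dataSetId, nativeTypeId, params.memorySpaceId, params.dataSpaceId,
+    //           H5P_DEFAULT, block);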
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a multi-dimensional block of the given
+     * <var>dataSetId</var>.
+     */
+    DataSpaceParameters getSpaceParameters(final int dataSetId, final long[] offset,
+            final int[] blockDimensionsOrNull, ICleanUpRegistry registry)
+    {
+        return tryGetSpaceParameters(dataSetId, offset, blockDimensionsOrNull, false, registry);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a multi-dimensional block of the given
+     * <var>dataSetId</var>.
+     */
+    DataSpaceParameters tryGetSpaceParameters(final int dataSetId, final long[] offset,
+            final int[] blockDimensionsOrNull, boolean nullWhenOutside, ICleanUpRegistry registry)
+    {
+        final int memorySpaceId;
+        final int dataSpaceId;
+        final long[] effectiveBlockDimensions;
+        if (blockDimensionsOrNull != null)
+        {
+            assert offset != null;
+            assert blockDimensionsOrNull.length == offset.length;
+
+            dataSpaceId = h5.getDataSpaceForDataSet(dataSetId, registry);
+            final long[] dimensions = h5.getDataSpaceDimensions(dataSpaceId);
+            if (dimensions.length != blockDimensionsOrNull.length)
+            {
+                throw new HDF5SpaceRankMismatch(blockDimensionsOrNull.length, dimensions.length);
+            }
+            effectiveBlockDimensions = new long[blockDimensionsOrNull.length];
+            for (int i = 0; i < offset.length; ++i)
+            {
+                final long maxBlockSize = dimensions[i] - offset[i];
+                if (maxBlockSize <= 0)
+                {
+                    if (nullWhenOutside)
+                    {
+                        return null;
+                    }
+                    throw new HDF5JavaException("Offset " + offset[i] + " >= Size " + dimensions[i]);
+                }
+                effectiveBlockDimensions[i] =
+                        (blockDimensionsOrNull[i] < 0) ? (int) maxBlockSize : Math.min(
+                                blockDimensionsOrNull[i], maxBlockSize);
+            }
+            h5.setHyperslabBlock(dataSpaceId, offset, effectiveBlockDimensions);
+            memorySpaceId = h5.createSimpleDataSpace(effectiveBlockDimensions, registry);
+        } else
+        {
+            memorySpaceId = H5S_ALL;
+            dataSpaceId = H5S_ALL;
+            effectiveBlockDimensions = h5.getDataDimensions(dataSetId, registry);
+        }
+        return new DataSpaceParameters(memorySpaceId, dataSpaceId,
+                MDAbstractArray.getLength(effectiveBlockDimensions), effectiveBlockDimensions);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for the given <var>dataSetId</var> when they are
+     * mapped to a block in memory.
+     */
+    DataSpaceParameters getBlockSpaceParameters(final int dataSetId, final int[] memoryOffset,
+            final int[] memoryDimensions, ICleanUpRegistry registry)
+    {
+        return tryGetBlockSpaceParameters(dataSetId, memoryOffset, memoryDimensions, false,
+                registry);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for the given <var>dataSetId</var> when they are
+     * mapped to a block in memory.
+     */
+    DataSpaceParameters tryGetBlockSpaceParameters(final int dataSetId, final int[] memoryOffset,
+            final int[] memoryDimensions, final boolean nullWhenOutside, ICleanUpRegistry registry)
+    {
+        assert memoryOffset != null;
+        assert memoryDimensions != null;
+        assert memoryDimensions.length == memoryOffset.length;
+
+        final long[] dimensions = h5.getDataDimensions(dataSetId, registry);
+        final int memorySpaceId =
+                h5.createSimpleDataSpace(MDAbstractArray.toLong(memoryDimensions), registry);
+        for (int i = 0; i < dimensions.length; ++i)
+        {
+            if (dimensions[i] + memoryOffset[i] > memoryDimensions[i])
+            {
+                if (nullWhenOutside)
+                {
+                    return null;
+                }
+                throw new HDF5JavaException("Dimensions " + dimensions[i] + " + memory offset "
+                        + memoryOffset[i] + " >= memory buffer " + memoryDimensions[i]);
+            }
+        }
+        h5.setHyperslabBlock(memorySpaceId, MDAbstractArray.toLong(memoryOffset), dimensions);
+        return new DataSpaceParameters(memorySpaceId, H5S_ALL,
+                MDAbstractArray.getLength(dimensions), dimensions);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a block of the given <var>dataSetId</var> when
+     * they are mapped to a block in memory.
+     */
+    DataSpaceParameters getBlockSpaceParameters(final int dataSetId, final int[] memoryOffset,
+            final int[] memoryDimensions, final long[] offset, final int[] blockDimensions,
+            ICleanUpRegistry registry)
+    {
+        return tryGetBlockSpaceParameters(dataSetId, memoryOffset, memoryDimensions, offset,
+                blockDimensions, false, registry);
+    }
+
+    /**
+     * Returns the {@link DataSpaceParameters} for a block of the given <var>dataSetId</var> when
+     * they are mapped to a block in memory.
+     */
+    DataSpaceParameters tryGetBlockSpaceParameters(final int dataSetId, final int[] memoryOffset,
+            final int[] memoryDimensions, final long[] offset, final int[] blockDimensions,
+            final boolean nullWhenOutside, ICleanUpRegistry registry)
+    {
+        assert memoryOffset != null;
+        assert memoryDimensions != null;
+        assert offset != null;
+        assert blockDimensions != null;
+        assert memoryOffset.length == offset.length;
+        assert memoryDimensions.length == memoryOffset.length;
+        assert blockDimensions.length == offset.length;
+
+        final int memorySpaceId;
+        final int dataSpaceId;
+        final long[] effectiveBlockDimensions;
+
+        dataSpaceId = h5.getDataSpaceForDataSet(dataSetId, registry);
+        final long[] dimensions = h5.getDataSpaceDimensions(dataSpaceId);
+        if (dimensions.length != blockDimensions.length)
+        {
+            throw new HDF5JavaException("Data Set is expected to be of rank "
+                    + blockDimensions.length + " (rank=" + dimensions.length + ")");
+        }
+        effectiveBlockDimensions = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            final long maxFileBlockSize = dimensions[i] - offset[i];
+            if (maxFileBlockSize <= 0)
+            {
+                if (nullWhenOutside)
+                {
+                    return null;
+                }
+                throw new HDF5JavaException("Offset " + offset[i] + " >= Size " + dimensions[i]);
+            }
+            final long maxMemoryBlockSize = memoryDimensions[i] - memoryOffset[i];
+            if (maxMemoryBlockSize <= 0)
+            {
+                if (nullWhenOutside)
+                {
+                    return null;
+                }
+                throw new HDF5JavaException("Memory offset " + memoryOffset[i] + " >= Size "
+                        + memoryDimensions[i]);
+            }
+            effectiveBlockDimensions[i] =
+                    Math.min(blockDimensions[i], Math.min(maxMemoryBlockSize, maxFileBlockSize));
+        }
+        h5.setHyperslabBlock(dataSpaceId, offset, effectiveBlockDimensions);
+        memorySpaceId =
+                h5.createSimpleDataSpace(MDAbstractArray.toLong(memoryDimensions), registry);
+        h5.setHyperslabBlock(memorySpaceId, MDAbstractArray.toLong(memoryOffset),
+                effectiveBlockDimensions);
+        return new DataSpaceParameters(memorySpaceId, dataSpaceId,
+                MDAbstractArray.getLength(effectiveBlockDimensions), effectiveBlockDimensions);
+    }
+
+    /**
+     * Returns the native data type for the given <var>dataSetId</var>, or
+     * <var>overrideDataTypeId</var>, if it is not negative.
+     */
+    int getNativeDataTypeId(final int dataSetId, final int overrideDataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int nativeDataTypeId;
+        if (overrideDataTypeId < 0)
+        {
+            nativeDataTypeId = h5.getNativeDataTypeForDataSet(dataSetId, registry);
+        } else
+        {
+            nativeDataTypeId = overrideDataTypeId;
+        }
+        return nativeDataTypeId;
+    }
+
+    /**
+     * Returns the members of <var>groupPath</var>. The order is <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    List<String> getGroupMembers(final String groupPath)
+    {
+        assert groupPath != null;
+        return removeInternalNames(getAllGroupMembers(groupPath), houseKeepingNameSuffix, false);
+    }
+
+    /**
+     * Returns all members of <var>groupPath</var>, including internal groups that may be used by
+     * the library to do house-keeping. The order is <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    List<String> getAllGroupMembers(final String groupPath)
+    {
+        final String[] groupMemberArray = h5.getGroupMembers(fileId, groupPath);
+        return new LinkedList<String>(Arrays.asList(groupMemberArray));
+    }
+
+    /**
+     * Returns the paths of the members of <var>groupPath</var> (i.e. the member names prefixed
+     * with the group path). The order is
+     * <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the member paths for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    List<String> getGroupMemberPaths(final String groupPath)
+    {
+        final String superGroupName = (groupPath.equals("/") ? "/" : groupPath + "/");
+        final List<String> memberNames = getGroupMembers(groupPath);
+        for (int i = 0; i < memberNames.size(); ++i)
+        {
+            memberNames.set(i, superGroupName + memberNames.get(i));
+        }
+        return memberNames;
+    }
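+
+    // For example, for a group "/group" with members "a" and "b":
+    //   getGroupMemberPaths("/group") -> ["/group/a", "/group/b"]
+    //   getGroupMemberPaths("/")      -> ["/a", "/b"]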
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataSetInformation} object. It is a
+     * failure condition if the <var>dataSetPath</var> does not exist or does not identify a data
+     * set. <br>
+     * <i>Does not read the data type path of a committed data type.</i>
+     * 
+     * @param dataSetPath The name (including path information) of the data set to return
+     *            information about.
+     */
+    HDF5DataSetInformation getDataSetInformation(final String dataSetPath)
+    {
+        return getDataSetInformation(dataSetPath, DataTypeInfoOptions.DEFAULT, true);
+    }
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataSetInformation} object. It is a
+     * failure condition if the <var>dataSetPath</var> does not exist or does not identify a data
+     * set.
+     * 
+     * @param dataSetPath The name (including path information) of the data set to return
+     *            information about.
+     * @param options What information to obtain about the data type.
+     * @param fillDimensions If <code>true</code>, fill in the dimensions of the dataset
+     */
+    HDF5DataSetInformation getDataSetInformation(final String dataSetPath,
+            final DataTypeInfoOptions options, final boolean fillDimensions)
+    {
+        assert dataSetPath != null;
+
+        final ICallableWithCleanUp<HDF5DataSetInformation> informationDeterminationRunnable =
+                new ICallableWithCleanUp<HDF5DataSetInformation>()
+                    {
+                        @Override
+                        public HDF5DataSetInformation call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId = h5.openDataSet(fileId, dataSetPath, registry);
+                            final int dataTypeId = h5.getDataTypeForDataSet(dataSetId, registry);
+                            final HDF5DataTypeInformation dataTypeInfo =
+                                    getDataTypeInformation(dataTypeId, options, registry);
+                            final HDF5DataTypeVariant variantOrNull =
+                                    options.knowsDataTypeVariant() ? tryGetTypeVariant(dataSetId,
+                                            registry) : null;
+                            final HDF5DataSetInformation dataSetInfo =
+                                    new HDF5DataSetInformation(dataTypeInfo, variantOrNull);
+                            // Is it a variable-length string?
+                            final boolean vlString =
+                                    (dataTypeInfo.getDataClass() == HDF5DataClass.STRING && h5
+                                            .isVariableLengthString(dataTypeId));
+                            if (vlString)
+                            {
+                                dataTypeInfo.setElementSize(-1);
+                            }
+                            if (fillDimensions)
+                            {
+                                h5.fillDataDimensions(dataSetId, false, dataSetInfo, registry);
+                            }
+                            return dataSetInfo;
+                        }
+                    };
+        return runner.call(informationDeterminationRunnable);
+    }
+
+    /**
+     * Returns the dimensions of the data set. It is a failure condition if the
+     * <var>dataSetPath</var> does not exist or does not identify a data set.
+     * 
+     * @param dataSetPath The name (including path information) of the data set to return
+     *            information about.
+     */
+    long[] getSpaceDimensions(final String dataSetPath)
+    {
+        assert dataSetPath != null;
+
+        final ICallableWithCleanUp<long[]> informationDeterminationRunnable =
+                new ICallableWithCleanUp<long[]>()
+                    {
+                        @Override
+                        public long[] call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId = h5.openDataSet(fileId, dataSetPath, registry);
+                            return h5.getDimensions(dataSetId, false, registry);
+                        }
+                    };
+        return runner.call(informationDeterminationRunnable);
+    }
+
+    /**
+     * Returns the rank of the data set <var>dataSetPath</var>. This combines the space rank and
+     * the array rank into one rank. It is a failure condition if <var>dataSetPath</var> does not
+     * exist or does not identify a data set. This method follows symbolic links.
+     */
+    int getRank(String dataSetPath)
+    {
+        final HDF5DataSetInformation info =
+                getDataSetInformation(dataSetPath, DataTypeInfoOptions.MINIMAL, true);
+        return info.getRank() + info.getTypeInformation().getRank();
+    }
+
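+    // Worked example (informal sketch): a data set with a 10 x 20 data space (space rank 2)
+    // whose element type is a 3-element array type (type rank 1) reports a combined rank of
+    // 3 from getRank() above.
+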
+    /**
+     * Returns the dimensions of the space of <var>dataSetPath</var> (empty if this is a scalar
+     * space). It is a failure condition if <var>dataSetPath</var> does not exist or does not
+     * identify a data set. This method follows symbolic links.
+     */
+    long[] getDimensions(String dataSetPath)
+    {
+        assert dataSetPath != null;
+
+        final HDF5DataSetInformation info =
+                getDataSetInformation(dataSetPath, DataTypeInfoOptions.MINIMAL, true);
+        return MatrixUtils.concat(info.getDimensions(), info.getTypeInformation().getDimensions());
+    }
+
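+    // Continuing the example above: the same data set reports dimensions [10, 20, 3] from
+    // getDimensions(), i.e. the space dimensions concatenated with the type dimensions.
+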
+    /**
+     * Returns the rank of the data set. It is a failure condition if the <var>dataSetPath</var>
+     * does not exist or does not identify a data set.
+     * 
+     * @param dataSetPath The name (including path information) of the data set to return
+     *            information about.
+     */
+    int getSpaceRank(final String dataSetPath)
+    {
+        assert dataSetPath != null;
+
+        final ICallableWithCleanUp<Integer> informationDeterminationRunnable =
+                new ICallableWithCleanUp<Integer>()
+                    {
+                        @Override
+                        public Integer call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId = h5.openDataSet(fileId, dataSetPath, registry);
+                            return h5.getRank(dataSetId, false, registry);
+                        }
+                    };
+        return runner.call(informationDeterminationRunnable);
+    }
+
+    HDF5DataTypeVariant tryGetTypeVariant(final String objectPath)
+    {
+        assert objectPath != null;
+
+        final ICallableWithCleanUp<HDF5DataTypeVariant> readRunnable =
+                new ICallableWithCleanUp<HDF5DataTypeVariant>()
+                    {
+                        @Override
+                        public HDF5DataTypeVariant call(ICleanUpRegistry registry)
+                        {
+                            final int objectId = h5.openObject(fileId, objectPath, registry);
+                            return tryGetTypeVariant(objectId, registry);
+                        }
+                    };
+
+        return runner.call(readRunnable);
+    }
+
+    HDF5DataTypeVariant tryGetTypeVariant(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+
+        final ICallableWithCleanUp<HDF5DataTypeVariant> readRunnable =
+                new ICallableWithCleanUp<HDF5DataTypeVariant>()
+                    {
+                        @Override
+                        public HDF5DataTypeVariant call(ICleanUpRegistry registry)
+                        {
+                            final int objectId = h5.openObject(fileId, objectPath, registry);
+                            return tryGetTypeVariant(objectId, attributeName, registry);
+                        }
+                    };
+
+        return runner.call(readRunnable);
+    }
+
+    HDF5EnumerationValueArray getEnumValueArray(final int attributeId, final String objectPath,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        final int storageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+        final int nativeDataTypeId = h5.getNativeDataType(storageDataTypeId, registry);
+        final int len;
+        final int enumTypeId;
+        if (h5.getClassType(storageDataTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions = h5.getArrayDimensions(storageDataTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException("Attribute '" + attributeName + "' of object '"
+                        + objectPath + "' is not an array of rank 1, but is of rank "
+                        + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            enumTypeId = h5.getBaseDataType(storageDataTypeId, registry);
+            if (h5.getClassType(enumTypeId) != H5T_ENUM)
+            {
+                throw new HDF5JavaException("Attribute '" + attributeName + "' of object '"
+                        + objectPath + "' is not of type enumeration array.");
+            }
+        } else
+        {
+            if (h5.getClassType(storageDataTypeId) != H5T_ENUM)
+            {
+                throw new HDF5JavaException("Attribute '" + attributeName + "' of object '"
+                        + objectPath + "' is not of type enumeration array.");
+            }
+            enumTypeId = storageDataTypeId;
+            final long[] arrayDimensions = h5.getDataDimensionsForAttribute(attributeId, registry);
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final HDF5EnumerationType enumType =
+                getEnumTypeForEnumDataType(null, enumTypeId, true, fileRegistry);
+        final byte[] data =
+                h5.readAttributeAsByteArray(attributeId, nativeDataTypeId, len
+                        * enumType.getStorageForm().getStorageSize());
+        final HDF5EnumerationValueArray value =
+                new HDF5EnumerationValueArray(enumType, EnumerationType.fromStorageForm(data,
+                        enumType.getStorageForm()));
+        return value;
+    }
+
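+    // Sizing note (informal): the byte buffer read above is len * storageSize bytes, e.g. a
+    // 100-element enum array with an 8-bit storage form reads 100 bytes, with a 16-bit
+    // storage form 200 bytes; EnumerationType.fromStorageForm() then decodes the ordinals.
+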
+    HDF5EnumerationValueMDArray getEnumValueMDArray(final int attributeId, final String objectPath,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        final int storageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+        final int nativeDataTypeId = h5.getNativeDataType(storageDataTypeId, registry);
+        final int len;
+        final int enumTypeId;
+        final int[] arrayDimensions;
+        if (h5.getClassType(storageDataTypeId) == H5T_ARRAY)
+        {
+            arrayDimensions = h5.getArrayDimensions(storageDataTypeId);
+            len = MDAbstractArray.getLength(arrayDimensions);
+            enumTypeId = h5.getBaseDataType(storageDataTypeId, registry);
+            if (h5.getClassType(enumTypeId) != H5T_ENUM)
+            {
+                throw new HDF5JavaException("Attribute '" + attributeName + "' of object '"
+                        + objectPath + "' is not of type enumeration array.");
+            }
+        } else
+        {
+            if (h5.getClassType(storageDataTypeId) != H5T_ENUM)
+            {
+                throw new HDF5JavaException("Attribute '" + attributeName + "' of object '"
+                        + objectPath + "' is not of type enumeration array.");
+            }
+            enumTypeId = storageDataTypeId;
+            arrayDimensions =
+                    MDAbstractArray.toInt(h5.getDataDimensionsForAttribute(attributeId, registry));
+            len = MDAbstractArray.getLength(arrayDimensions);
+        }
+        final HDF5EnumerationType enumType =
+                getEnumTypeForEnumDataType(null, enumTypeId, true, fileRegistry);
+        final byte[] data =
+                h5.readAttributeAsByteArray(attributeId, nativeDataTypeId, len
+                        * enumType.getStorageForm().getStorageSize());
+        final HDF5EnumerationValueMDArray value =
+                new HDF5EnumerationValueMDArray(enumType, EnumerationType.fromStorageForm(data,
+                        arrayDimensions, enumType.getStorageForm()));
+        return value;
+    }
+
+    int getEnumDataTypeId(final int storageDataTypeId, ICleanUpRegistry registry)
+    {
+        final int enumDataTypeId;
+        if (h5.getClassType(storageDataTypeId) == H5T_ARRAY)
+        {
+            enumDataTypeId = h5.getBaseDataType(storageDataTypeId, registry);
+        } else
+        {
+            enumDataTypeId = storageDataTypeId;
+        }
+        return enumDataTypeId;
+    }
+
+    HDF5DataTypeVariant[] tryGetTypeVariantForCompoundMembers(String dataTypePathOrNull,
+            ICleanUpRegistry registry)
+    {
+        if (dataTypePathOrNull == null)
+        {
+            return null;
+        }
+        checkOpen();
+        final int objectId = h5.openObject(fileId, dataTypePathOrNull, registry);
+        final String typeVariantMembersAttributeName =
+                getTypeVariantMembersAttributeName(houseKeepingNameSuffix);
+        if (h5.existsAttribute(objectId, typeVariantMembersAttributeName) == false)
+        {
+            return null;
+        }
+        final int attributeId =
+                h5.openAttribute(objectId, typeVariantMembersAttributeName, registry);
+        final HDF5EnumerationValueArray valueArray =
+                getEnumValueArray(attributeId, dataTypePathOrNull, typeVariantMembersAttributeName,
+                        registry);
+        final HDF5DataTypeVariant[] variants = new HDF5DataTypeVariant[valueArray.getLength()];
+        boolean hasVariants = false;
+        for (int i = 0; i < variants.length; ++i)
+        {
+            variants[i] = HDF5DataTypeVariant.values()[valueArray.getOrdinal(i)];
+            hasVariants |= variants[i].isTypeVariant();
+        }
+        if (hasVariants)
+        {
+            return variants;
+        } else
+        {
+            return null;
+        }
+    }
+
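+    // Reading note (informal): the member type variants are stored as one enum array
+    // attribute (named via the house-keeping suffix) on the committed compound data type;
+    // null is returned both when the attribute is missing and when no member actually
+    // carries a variant, so callers can treat "no attribute" and "all NONE" alike.
+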
+    HDF5DataTypeVariant tryGetTypeVariant(final int objectId, ICleanUpRegistry registry)
+    {
+        final int typeVariantOrdinal = getAttributeTypeVariant(objectId, registry);
+        return typeVariantOrdinal < 0 ? null : HDF5DataTypeVariant.values()[typeVariantOrdinal];
+    }
+
+    HDF5DataTypeVariant tryGetTypeVariant(final int objectId, String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int typeVariantOrdinal = getAttributeTypeVariant(objectId, attributeName, registry);
+        return typeVariantOrdinal < 0 ? null : HDF5DataTypeVariant.values()[typeVariantOrdinal];
+    }
+
+    /**
+     * Returns the ordinal for the type variant of the object with <var>objectId</var>, or
+     * <code>-1</code>, if no type variant is defined for this object.
+     * 
+     * @param objectId The id of the data set object in the file.
+     * @return The ordinal of the type variant or <code>-1</code>.
+     */
+    int getAttributeTypeVariant(final int objectId, ICleanUpRegistry registry)
+    {
+        checkOpen();
+        final String dataTypeVariantAttributeName =
+                createObjectTypeVariantAttributeName(houseKeepingNameSuffix);
+        if (h5.existsAttribute(objectId, dataTypeVariantAttributeName) == false)
+        {
+            return -1;
+        }
+        final int attributeId = h5.openAttribute(objectId, dataTypeVariantAttributeName, registry);
+        return getEnumOrdinal(attributeId, -1, typeVariantDataType);
+    }
+
+    /**
+     * Returns the ordinal for the type variant of attribute <var>attributeName</var> of the
+     * object with <var>objectId</var>, or <code>-1</code>, if no type variant is defined for
+     * this attribute.
+     * 
+     * @param objectId The id of the data set object in the file.
+     * @param attributeName The name of the attribute to get the type variant for.
+     * @return The ordinal of the type variant or <code>-1</code>.
+     */
+    int getAttributeTypeVariant(final int objectId, String attributeName, ICleanUpRegistry registry)
+    {
+        checkOpen();
+        final String typeVariantAttrName =
+                createAttributeTypeVariantAttributeName(attributeName, houseKeepingNameSuffix);
+        if (h5.existsAttribute(objectId, typeVariantAttrName) == false)
+        {
+            return -1;
+        }
+        final int attributeId = h5.openAttribute(objectId, typeVariantAttrName, registry);
+        return getEnumOrdinal(attributeId, -1, typeVariantDataType);
+    }
+
+    int getEnumOrdinal(final int attributeId, int nativeDataTypeId,
+            final HDF5EnumerationType enumType)
+    {
+        final byte[] data =
+                h5.readAttributeAsByteArray(attributeId,
+                        (nativeDataTypeId < 0) ? enumType.getNativeTypeId() : nativeDataTypeId,
+                        enumType.getStorageForm().getStorageSize());
+        return EnumerationType.fromStorageForm(data);
+    }
+
+    /**
+     * Returns the explicitly saved string length stored in attribute
+     * <var>HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME</var> of <var>objectId</var>,
+     * or <code>-1</code>, if no explicit string length is defined for this attribute.
+     * 
+     * @param objectId The id of the data set object in the file.
+     * @return The length of the string attribute or -1.
+     */
+    private int getHousekeepingAttributeExplicitStringLength(final int objectId,
+            ICleanUpRegistry registry)
+    {
+        if (h5.existsAttribute(objectId, HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME) == false)
+        {
+            return -1;
+        }
+        final int attributeId =
+                h5.openAttribute(objectId, HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME,
+                        registry);
+        final int[] data = h5.readAttributeAsIntArray(attributeId, H5T_NATIVE_INT32, 1);
+        return data[0];
+    }
+
+    HDF5DataTypeInformation getDataTypeInformation(final int dataTypeId,
+            final DataTypeInfoOptions options)
+    {
+        final ICallableWithCleanUp<HDF5DataTypeInformation> informationDeterminationRunnable =
+                new ICallableWithCleanUp<HDF5DataTypeInformation>()
+                    {
+                        @Override
+                        public HDF5DataTypeInformation call(ICleanUpRegistry registry)
+                        {
+                            final HDF5DataTypeInformation dataTypeInfo =
+                                    getDataTypeInformation(dataTypeId, options, registry);
+                            // Is it a variable-length string?
+                            final boolean vlString =
+                                    (dataTypeInfo.getDataClass() == HDF5DataClass.STRING && h5
+                                            .isVariableLengthString(dataTypeId));
+                            if (vlString)
+                            {
+                                dataTypeInfo.setElementSize(-1);
+                            }
+                            return dataTypeInfo;
+                        }
+                    };
+        return runner.call(informationDeterminationRunnable);
+    }
+
+    HDF5DataTypeInformation getDataTypeInformation(final int dataTypeId,
+            final DataTypeInfoOptions options, final ICleanUpRegistry registry)
+    {
+        final int classTypeId = h5.getClassType(dataTypeId);
+        final HDF5DataClass dataClass;
+        final int totalSize = h5.getDataTypeSize(dataTypeId);
+        if (classTypeId == H5T_ARRAY)
+        {
+            dataClass = getElementClassForArrayDataType(dataTypeId);
+            final int[] arrayDimensions = h5.getArrayDimensions(dataTypeId);
+            final int numberOfElements = MDAbstractArray.getLength(arrayDimensions);
+            final int size = totalSize / numberOfElements;
+            final int baseTypeId = h5.getBaseDataType(dataTypeId, registry);
+            final boolean signed =
+                    (dataClass == HDF5DataClass.INTEGER) ? h5.getSigned(baseTypeId)
+                            : (dataClass == HDF5DataClass.FLOAT);
+            final String dataTypePathOrNull =
+                    options.knowsDataTypePath() ? tryGetDataTypePath(baseTypeId) : null;
+            final CharacterEncoding dataSetEncoding =
+                    (dataClass == HDF5DataClass.STRING) ? h5.getCharacterEncoding(baseTypeId)
+                            : CharacterEncoding.ASCII;
+            final boolean variableLengthString =
+                    (dataClass == HDF5DataClass.STRING) ? h5.isVariableLengthString(baseTypeId)
+                            : false;
+            return new HDF5DataTypeInformation(dataTypePathOrNull, options, dataClass,
+                    dataSetEncoding, houseKeepingNameSuffix, size, arrayDimensions, true, signed,
+                    variableLengthString);
+        } else
+        {
+            dataClass = getDataClassForClassType(classTypeId, dataTypeId);
+            final String opaqueTagOrNull;
+            if (dataClass == HDF5DataClass.OPAQUE)
+            {
+                opaqueTagOrNull = h5.tryGetOpaqueTag(dataTypeId);
+            } else
+            {
+                opaqueTagOrNull = null;
+            }
+            final String dataTypePathOrNull =
+                    options.knowsDataTypePath() ? tryGetDataTypePath(dataTypeId) : null;
+            final boolean signed =
+                    (dataClass == HDF5DataClass.INTEGER) ? h5.getSigned(dataTypeId)
+                            : (dataClass == HDF5DataClass.FLOAT);
+            final CharacterEncoding dataSetEncoding =
+                    (dataClass == HDF5DataClass.STRING) ? h5.getCharacterEncoding(dataTypeId)
+                            : CharacterEncoding.ASCII;
+            final boolean variableLengthString =
+                    (dataClass == HDF5DataClass.STRING) ? h5.isVariableLengthString(dataTypeId)
+                            : false;
+            return new HDF5DataTypeInformation(dataTypePathOrNull, options, dataClass,
+                    dataSetEncoding, houseKeepingNameSuffix, totalSize, signed,
+                    variableLengthString, opaqueTagOrNull);
+        }
+    }
+
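+    // Worked example (informal): for an array data type, the element size is derived as
+    // totalSize / numberOfElements, e.g. a 2 x 2 array of 8-byte doubles has a total size
+    // of 32 bytes and thus an element size of 8.
+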
+    private HDF5DataClass getDataClassForClassType(final int classTypeId, final int dataTypeId)
+    {
+        HDF5DataClass dataClass = HDF5DataClass.classIdToDataClass(classTypeId);
+        // Is it a boolean?
+        if (dataClass == HDF5DataClass.ENUM && h5.dataTypesAreEqual(dataTypeId, booleanDataTypeId))
+        {
+            dataClass = HDF5DataClass.BOOLEAN;
+        }
+        return dataClass;
+    }
+
+    private HDF5DataClass getElementClassForArrayDataType(final int arrayDataTypeId)
+    {
+        for (HDF5DataClass eClass : HDF5DataClass.values())
+        {
+            if (h5.hasClassType(arrayDataTypeId, eClass.getId()))
+            {
+                return eClass;
+            }
+        }
+        return HDF5DataClass.OTHER;
+    }
+
+    //
+    // Compound
+    //
+
+    String getCompoundDataTypeName(final String nameOrNull, final int dataTypeId)
+    {
+        return getDataTypeName(nameOrNull, HDF5DataClass.COMPOUND, dataTypeId);
+    }
+
+    <T> HDF5ValueObjectByteifyer<T> createCompoundByteifyers(final Class<T> compoundClazz,
+            final HDF5CompoundMemberMapping[] compoundMembers,
+            final CompoundTypeInformation compoundTypeInfoOrNull)
+    {
+        final HDF5ValueObjectByteifyer<T> objectByteifyer =
+                new HDF5ValueObjectByteifyer<T>(compoundClazz,
+                        new HDF5ValueObjectByteifyer.IFileAccessProvider()
+                            {
+                                @Override
+                                public int getBooleanDataTypeId()
+                                {
+                                    return booleanDataTypeId;
+                                }
+
+                                @Override
+                                public int getStringDataTypeId(int maxLength)
+                                {
+                                    final int typeId =
+                                            h5.createDataTypeString(maxLength, fileRegistry);
+                                    return typeId;
+                                }
+
+                                @Override
+                                public int getArrayTypeId(int baseTypeId, int length)
+                                {
+                                    final int typeId =
+                                            h5.createArrayType(baseTypeId, length, fileRegistry);
+                                    return typeId;
+                                }
+
+                                @Override
+                                public int getArrayTypeId(int baseTypeId, int[] dimensions)
+                                {
+                                    final int typeId =
+                                            h5.createArrayType(baseTypeId, dimensions, fileRegistry);
+                                    return typeId;
+                                }
+
+                                @Override
+                                public CharacterEncoding getCharacterEncoding(int dataTypeId)
+                                {
+                                    return (dataTypeId < 0) ? encodingForNewDataSets : h5
+                                            .getCharacterEncoding(dataTypeId);
+                                }
+
+                                @Override
+                                public HDF5EnumerationType getEnumType(String[] options)
+                                {
+                                    final int storageDataTypeId =
+                                            h5.createDataTypeEnum(options, fileRegistry);
+                                    final int nativeDataTypeId =
+                                            h5.getNativeDataType(storageDataTypeId, fileRegistry);
+                                    return new HDF5EnumerationType(fileId, storageDataTypeId,
+                                            nativeDataTypeId, null, options, HDF5BaseReader.this);
+                                }
+
+                                @Override
+                                public int getVariableLengthStringDataTypeId()
+                                {
+                                    return variableLengthStringDataTypeId;
+                                }
+
+                                @Override
+                                public byte[] createObjectReference(String referencedObjectPath)
+                                {
+                                    return h5.createObjectReference(fileId, referencedObjectPath);
+                                }
+                            }, compoundTypeInfoOrNull, compoundMembers);
+        return objectByteifyer;
+    }
+
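+    // Design note (informal): the anonymous IFileAccessProvider above creates all derived
+    // data types against fileRegistry rather than a per-call registry, so the type ids
+    // handed to the byteifyer stay valid for the lifetime of the file.
+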
+    int createStorageCompoundDataType(HDF5ValueObjectByteifyer<?> objectArrayifyer)
+    {
+        final int storageDataTypeId =
+                h5.createDataTypeCompound(objectArrayifyer.getRecordSizeOnDisk(), fileRegistry);
+        objectArrayifyer.insertMemberTypes(storageDataTypeId);
+        return storageDataTypeId;
+    }
+
+    int createNativeCompoundDataType(HDF5ValueObjectByteifyer<?> objectArrayifyer)
+    {
+        final int nativeDataTypeId =
+                h5.createDataTypeCompound(objectArrayifyer.getRecordSizeInMemory(), fileRegistry);
+        objectArrayifyer.insertNativeMemberTypes(nativeDataTypeId, h5, fileRegistry);
+        return nativeDataTypeId;
+    }
+
+    //
+    // Enum
+    //
+
+    HDF5EnumerationType getEnumTypeForStorageDataType(final String nameOrNull,
+            final int storageDataTypeId, final boolean resolveName, final String objectPathOrNull,
+            final String attributeNameOrNull, final ICleanUpRegistry registry)
+    {
+        int classType = h5.getClassType(storageDataTypeId);
+        final boolean isArray = (classType == H5T_ARRAY);
+        final int enumStoreDataTypeId;
+        if (isArray)
+        {
+            enumStoreDataTypeId = h5.getBaseDataType(storageDataTypeId, registry);
+            classType = h5.getClassType(enumStoreDataTypeId);
+        } else
+        {
+            enumStoreDataTypeId = storageDataTypeId;
+        }
+        if (classType != H5T_ENUM)
+        {
+            if (attributeNameOrNull != null)
+            {
+                throw new HDF5JavaException("Attribute '" + attributeNameOrNull + "' of object '"
+                        + objectPathOrNull + "' is not of enum type.");
+            } else if (objectPathOrNull != null)
+            {
+                throw new HDF5JavaException("Object '" + objectPathOrNull
+                        + "' is not of enum type.");
+            } else
+            {
+                throw new HDF5JavaException("Type '" + (nameOrNull != null ? nameOrNull : "???")
+                        + "' is not of enum type.");
+            }
+        }
+        return getEnumTypeForEnumDataType(nameOrNull, enumStoreDataTypeId, resolveName, registry);
+    }
+
+    HDF5EnumerationType getEnumTypeForEnumDataType(final String nameOrNull,
+            final int enumStoreDataTypeId, final boolean resolveName,
+            final ICleanUpRegistry registry)
+    {
+        final int nativeDataTypeId = h5.getNativeDataType(enumStoreDataTypeId, registry);
+        final String[] values = h5.getNamesForEnumOrCompoundMembers(enumStoreDataTypeId);
+        return new HDF5EnumerationType(fileId, enumStoreDataTypeId, nativeDataTypeId,
+                resolveName ? getEnumDataTypeName(nameOrNull, enumStoreDataTypeId) : nameOrNull,
+                values, this);
+    }
+
+    void checkEnumValues(int dataTypeId, final String[] values, final String nameOrNull)
+    {
+        final String[] valuesStored = h5.getNamesForEnumOrCompoundMembers(dataTypeId);
+        if (valuesStored.length != values.length)
+        {
+            throw new IllegalStateException("Enum " + getEnumDataTypeName(nameOrNull, dataTypeId)
+                    + " has " + valuesStored.length + " members, but should have " + values.length);
+        }
+        for (int i = 0; i < values.length; ++i)
+        {
+            if (values[i].equals(valuesStored[i]) == false)
+            {
+                throw new HDF5JavaException("Enum member index " + i + " of enum "
+                        + getEnumDataTypeName(nameOrNull, dataTypeId) + " is '" + valuesStored[i]
+                        + "', but should be '" + values[i] + "'");
+            }
+        }
+    }
+
+    String getEnumDataTypeName(final String nameOrNull, final int dataTypeId)
+    {
+        return getDataTypeName(nameOrNull, HDF5DataClass.ENUM, dataTypeId);
+    }
+
+    private String getDataTypeName(final String nameOrNull, final HDF5DataClass dataClass,
+            final int dataTypeId)
+    {
+        if (nameOrNull != null)
+        {
+            return nameOrNull;
+        } else
+        {
+            final String nameFromPathOrNull =
+                    HDF5Utils.tryGetDataTypeNameFromPath(tryGetDataTypePath(dataTypeId),
+                            houseKeepingNameSuffix, dataClass);
+            return (nameFromPathOrNull == null) ? "UNKNOWN" : nameFromPathOrNull;
+        }
+    }
+
+    boolean isScaledEnum(final int objectId, final ICleanUpRegistry registry)
+    {
+        final HDF5DataTypeVariant typeVariantOrNull = tryGetTypeVariant(objectId, registry);
+        return (HDF5DataTypeVariant.ENUM == typeVariantOrNull);
+    }
+
+    boolean isScaledBitField(final int objectId, final ICleanUpRegistry registry)
+    {
+        final HDF5DataTypeVariant typeVariantOrNull = tryGetTypeVariant(objectId, registry);
+        return (HDF5DataTypeVariant.BITFIELD == typeVariantOrNull);
+    }
+
+    //
+    // String
+    //
+
+    private int openOrCreateVLStringType()
+    {
+        final String variableLengthStringTypePath =
+                getVariableLengthStringDataTypePath(houseKeepingNameSuffix);
+        int dataTypeId = getDataTypeId(variableLengthStringTypePath);
+        if (dataTypeId < 0)
+        {
+            dataTypeId = h5.createDataTypeVariableString(fileRegistry);
+            commitDataType(variableLengthStringTypePath, dataTypeId);
+        }
+        return dataTypeId;
+    }
+
+    String getStringAttribute(final int objectId, final String objectPath,
+            final String attributeName, final boolean readRaw, final ICleanUpRegistry registry)
+    {
+        final int attributeId = h5.openAttribute(objectId, attributeName, registry);
+        final int stringDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+        final boolean isString = (h5.getClassType(stringDataTypeId) == H5T_STRING);
+        if (isString == false)
+        {
+            throw new IllegalArgumentException("Attribute " + attributeName + " of object "
+                    + objectPath + " needs to be a String.");
+        }
+        final int size = h5.getDataTypeSize(stringDataTypeId);
+        if (h5.isVariableLengthString(stringDataTypeId))
+        {
+            String[] data = new String[1];
+            h5.readAttributeVL(attributeId, stringDataTypeId, data);
+            return data[0];
+        } else
+        {
+            final CharacterEncoding dataSetEncoding = h5.getCharacterEncoding(stringDataTypeId);
+            final byte[] data = h5.readAttributeAsByteArray(attributeId, stringDataTypeId, size);
+            return (readRaw ? StringUtils.fromBytes(data, dataSetEncoding) : StringUtils
+                    .fromBytes0Term(data, dataSetEncoding));
+        }
+    }
+
+    String[] getStringArrayAttribute(final int objectId, final String objectPath,
+            final String attributeName, final boolean readRaw, final ICleanUpRegistry registry)
+    {
+        final int attributeId = h5.openAttribute(objectId, attributeName, registry);
+        final int stringArrayDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+        final boolean isArray = (h5.getClassType(stringArrayDataTypeId) == H5T_ARRAY);
+        if (isArray == false)
+        {
+            throw new HDF5JavaException("Attribute " + attributeName + " of object " + objectPath
+                    + " needs to be a String array of rank 1.");
+        }
+        final int stringDataTypeId = h5.getBaseDataType(stringArrayDataTypeId, registry);
+        final boolean isStringArray = (h5.getClassType(stringDataTypeId) == H5T_STRING);
+        if (isStringArray == false)
+        {
+            throw new HDF5JavaException("Attribute " + attributeName + " of object " + objectPath
+                    + " needs to be a String array of rank 1.");
+        }
+        final int size = h5.getDataTypeSize(stringArrayDataTypeId);
+        if (h5.isVariableLengthString(stringDataTypeId))
+        {
+            String[] data = new String[1];
+            h5.readAttributeVL(attributeId, stringDataTypeId, data);
+            return data;
+        } else
+        {
+            final CharacterEncoding dataSetEncoding =
+                    h5.getCharacterEncoding(stringArrayDataTypeId);
+            byte[] data = h5.readAttributeAsByteArray(attributeId, stringArrayDataTypeId, size);
+            final int[] arrayDimensions = h5.getArrayDimensions(stringArrayDataTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException("Attribute " + attributeName + " of object "
+                        + objectPath + " needs to be a String array of rank 1.");
+            }
+            final int lengthPerElement = h5.getDataTypeSize(stringDataTypeId);
+            final int numberOfElements = arrayDimensions[0];
+            final String[] result = new String[numberOfElements];
+            for (int i = 0, startIdx = 0, endIdx = lengthPerElement; i < numberOfElements; ++i, startIdx +=
+                    lengthPerElement, endIdx += lengthPerElement)
+            {
+                result[i] =
+                        readRaw ? StringUtils.fromBytes(data, startIdx, endIdx, dataSetEncoding)
+                                : StringUtils.fromBytes0Term(data, startIdx, endIdx,
+                                        dataSetEncoding);
+            }
+            return result;
+        }
+    }
+
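+    // Unpacking sketch (informal): for a rank-1 array of three fixed-length 4-byte strings,
+    // the attribute is read as one 12-byte buffer and sliced at offsets 0, 4 and 8; with
+    // readRaw the slices are kept verbatim, otherwise each is cut at its first 0-byte.
+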
+    MDArray<String> getStringMDArrayAttribute(final int objectId, final String objectPath,
+            final String attributeName, final boolean readRaw, final ICleanUpRegistry registry)
+    {
+        final int attributeId = h5.openAttribute(objectId, attributeName, registry);
+        final int stringArrayDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+        final boolean isArray = (h5.getClassType(stringArrayDataTypeId) == H5T_ARRAY);
+        if (isArray == false)
+        {
+            throw new HDF5JavaException("Attribute " + attributeName + " of object " + objectPath
+                    + " needs to be a String array.");
+        }
+        final int stringDataTypeId = h5.getBaseDataType(stringArrayDataTypeId, registry);
+        final boolean isStringArray = (h5.getClassType(stringDataTypeId) == H5T_STRING);
+        if (isStringArray == false)
+        {
+            throw new HDF5JavaException("Attribute " + attributeName + " of object " + objectPath
+                    + " needs to be a String array.");
+        }
+        final int size = h5.getDataTypeSize(stringArrayDataTypeId);
+        if (h5.isVariableLengthString(stringDataTypeId))
+        {
+            String[] data = new String[1];
+            h5.readAttributeVL(attributeId, stringDataTypeId, data);
+            return new MDArray<String>(data, new int[]
+                { 1 });
+        } else
+        {
+            final byte[] data =
+                    h5.readAttributeAsByteArray(attributeId, stringArrayDataTypeId, size);
+            final CharacterEncoding dataSetEncoding =
+                    h5.getCharacterEncoding(stringArrayDataTypeId);
+            final int[] arrayDimensions = h5.getArrayDimensions(stringArrayDataTypeId);
+            final int lengthPerElement = h5.getDataTypeSize(stringDataTypeId);
+            final int numberOfElements = MDAbstractArray.getLength(arrayDimensions);
+            final String[] result = new String[numberOfElements];
+            for (int i = 0, startIdx = 0, endIdx = lengthPerElement; i < numberOfElements; ++i, startIdx +=
+                    lengthPerElement, endIdx += lengthPerElement)
+            {
+                result[i] =
+                        readRaw ? StringUtils.fromBytes(data, startIdx, endIdx, dataSetEncoding)
+                                : StringUtils.fromBytes0Term(data, startIdx, endIdx,
+                                        dataSetEncoding);
+            }
+            return new MDArray<String>(result, arrayDimensions);
+        }
+    }
+
+    // Date & Time
+
+    void checkIsTimeStamp(final String objectPath, final int dataSetId, ICleanUpRegistry registry)
+            throws HDF5JavaException
+    {
+        final int typeVariantOrdinal = getAttributeTypeVariant(dataSetId, registry);
+        if (typeVariantOrdinal != HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH
+                .ordinal())
+        {
+            throw new HDF5JavaException("Data set '" + objectPath + "' is not a time stamp.");
+        }
+    }
+
+    void checkIsTimeStamp(final String objectPath, final String attributeName, final int dataSetId,
+            ICleanUpRegistry registry) throws HDF5JavaException
+    {
+        final int typeVariantOrdinal = getAttributeTypeVariant(dataSetId, attributeName, registry);
+        if (typeVariantOrdinal != HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH
+                .ordinal())
+        {
+            throw new HDF5JavaException("Attribute '" + attributeName + "' of data set '"
+                    + objectPath + "' is not a time stamp.");
+        }
+    }
+
+    HDF5TimeUnit checkIsTimeDuration(final String objectPath, final int dataSetId,
+            ICleanUpRegistry registry) throws HDF5JavaException
+    {
+        final int typeVariantOrdinal = getAttributeTypeVariant(dataSetId, registry);
+        if (HDF5DataTypeVariant.isTimeDuration(typeVariantOrdinal) == false)
+        {
+            throw new HDF5JavaException("Data set '" + objectPath + "' is not a time duration.");
+        }
+        return HDF5DataTypeVariant.getTimeUnit(typeVariantOrdinal);
+    }
+
+    HDF5TimeUnit checkIsTimeDuration(final String objectPath, final String attributeName,
+            final int dataSetId, ICleanUpRegistry registry) throws HDF5JavaException
+    {
+        final int typeVariantOrdinal = getAttributeTypeVariant(dataSetId, attributeName, registry);
+        if (HDF5DataTypeVariant.isTimeDuration(typeVariantOrdinal) == false)
+        {
+            throw new HDF5JavaException("Attribute '" + attributeName + "' of data set '"
+                    + objectPath + "' is not a time duration.");
+        }
+        return HDF5DataTypeVariant.getTimeUnit(typeVariantOrdinal);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5BaseWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5BaseWriter.java
new file mode 100644
index 0000000..81dba54
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5BaseWriter.java
@@ -0,0 +1,1738 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createAttributeTypeVariantAttributeName;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createObjectTypeVariantAttributeName;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getDataTypeGroup;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getTypeVariantDataTypePath;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.isEmpty;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.isNonPositive;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_UNLIMITED;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT16;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT32;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT8;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I16LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I32LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I8LE;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.Flushable;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.EnumSet;
+import java.util.LinkedHashSet;
+import java.util.Set;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatasetInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5FileNotFoundException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.namedthread.NamingThreadPoolExecutor;
+import ch.systemsx.cisd.hdf5.IHDF5CompoundInformationRetriever.IByteArrayInspector;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.SyncMode;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Class that provides base methods for reading and writing HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5BaseWriter extends HDF5BaseReader
+{
+
+    private static final int SHUTDOWN_TIMEOUT_SECONDS = 60;
+
+    private static final int MAX_TYPE_VARIANT_TYPES = 1024;
+
+    private final static EnumSet<SyncMode> BLOCKING_SYNC_MODES = EnumSet.of(SyncMode.SYNC_BLOCK,
+            SyncMode.SYNC_ON_FLUSH_BLOCK);
+
+    private final static EnumSet<SyncMode> NON_BLOCKING_SYNC_MODES = EnumSet.of(SyncMode.SYNC,
+            SyncMode.SYNC_ON_FLUSH);
+
+    private final static EnumSet<SyncMode> SYNC_ON_CLOSE_MODES = EnumSet.of(SyncMode.SYNC_BLOCK,
+            SyncMode.SYNC);
+
+    /**
+     * The size threshold for the COMPACT storage layout.
+     */
+    final static int COMPACT_LAYOUT_THRESHOLD = 256;
+
+    /**
+     * ExecutorService for calling <code>fsync(2)</code> in a non-blocking way.
+     */
+    private final static ExecutorService syncExecutor = new NamingThreadPoolExecutor("HDF5 Sync")
+            .corePoolSize(3).daemonize();
+
+    static
+    {
+        // Ensure all sync() calls are finished.
+        Runtime.getRuntime().addShutdownHook(new Thread()
+            {
+                @Override
+                public void run()
+                {
+                    syncExecutor.shutdownNow();
+                    try
+                    {
+                        syncExecutor.awaitTermination(SHUTDOWN_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+                    } catch (InterruptedException ex)
+                    {
+                        // Unexpected
+                        ex.printStackTrace();
+                    }
+                }
+            });
+    }
+
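+    // Note (informal): shutting down the sync executor interrupts the syncer thread, which
+    // (see setupSyncThread below) reacts by enqueueing CLOSE_ON_EXIT, so still-open files
+    // are closed and synced before the JVM exits, within SHUTDOWN_TIMEOUT_SECONDS.
+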
+    private final RandomAccessFile fileForSyncing;
+
+    private enum Command
+    {
+        SYNC, CLOSE_ON_EXIT, CLOSE_SYNC, EXIT
+    }
+
+    private final BlockingQueue<Command> commandQueue;
+
+    private final Set<Flushable> flushables = new LinkedHashSet<Flushable>();
+
+    final boolean useExtentableDataTypes;
+
+    final boolean overwriteFile;
+
+    final boolean keepDataSetIfExists;
+
+    final boolean useSimpleDataSpaceForAttributes;
+
+    final SyncMode syncMode;
+
+    final FileFormat fileFormat;
+
+    HDF5BaseWriter(File hdf5File, boolean performNumericConversions, boolean useUTF8CharEncoding,
+            boolean autoDereference, FileFormat fileFormat, boolean useExtentableDataTypes,
+            boolean overwriteFile, boolean keepDataSetIfExists,
+            boolean useSimpleDataSpaceForAttributes, String preferredHouseKeepingNameSuffix,
+            SyncMode syncMode)
+    {
+        super(hdf5File, performNumericConversions, useUTF8CharEncoding, autoDereference,
+                fileFormat, overwriteFile, preferredHouseKeepingNameSuffix);
+        try
+        {
+            this.fileForSyncing = new RandomAccessFile(hdf5File, "rw");
+        } catch (FileNotFoundException ex)
+        {
+            // Should not happen, as openFile() was called in super()
+            throw new HDF5JavaException("Cannot open RandomAccessFile: " + ex.getMessage());
+        }
+        this.fileFormat = fileFormat;
+        this.useExtentableDataTypes = useExtentableDataTypes;
+        this.overwriteFile = overwriteFile;
+        this.keepDataSetIfExists = keepDataSetIfExists;
+        this.useSimpleDataSpaceForAttributes = useSimpleDataSpaceForAttributes;
+        this.syncMode = syncMode;
+        readNamedDataTypes();
+        saveNonDefaultHouseKeepingNameSuffix();
+        commandQueue = new LinkedBlockingQueue<Command>();
+        setupSyncThread();
+    }
+
+    private void setupSyncThread()
+    {
+        syncExecutor.execute(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    while (true)
+                    {
+                        try
+                        {
+                            switch (commandQueue.take())
+                            {
+                                case SYNC:
+                                    syncNow();
+                                    break;
+                                case CLOSE_ON_EXIT:
+                                    closeNow();
+                                    return;
+                                case CLOSE_SYNC:
+                                    closeSync();
+                                    return;
+                                case EXIT:
+                                    return;
+                            }
+                        } catch (InterruptedException ex)
+                        {
+                            // Shutdown has been triggered by shutdownNow(); add
+                            // CLOSE_ON_EXIT to the queue.
+                            // (Note that a close() on a closed RandomAccessFile is harmless.)
+                            commandQueue.add(Command.CLOSE_ON_EXIT);
+                        }
+                    }
+                }
+            });
+    }
+
+    @Override
+    int openFile(FileFormat fileFormatInit, boolean overwriteInit)
+    {
+        final boolean enforce_1_8 = (fileFormatInit == FileFormat.STRICTLY_1_8);
+        if (hdf5File.exists() && overwriteInit == false)
+        {
+            if (hdf5File.canWrite() == false)
+            {
+                throw new HDF5FileNotFoundException(hdf5File, "File is not writable.");
+            }
+            return h5.openFileReadWrite(hdf5File.getPath(), enforce_1_8, fileRegistry);
+        } else
+        {
+            final File directory = hdf5File.getParentFile();
+            if (directory.exists() == false)
+            {
+                throw new HDF5FileNotFoundException(directory, "Directory does not exist.");
+            }
+            if (directory.canWrite() == false)
+            {
+                throw new HDF5FileNotFoundException(directory, "Directory is not writable.");
+            }
+            return h5.createFile(hdf5File.getPath(), enforce_1_8, fileRegistry);
+        }
+    }
+
+    /**
+     * Calls <code>fsync(2)</code> in the current thread.
+     */
+    private void syncNow()
+    {
+        try
+        {
+            // Implementation note 1: On Unix this calls fsync(2), on Windows FlushFileBuffers().
+            // Implementation note 2: We do not call fileForSyncing.getChannel().force(false),
+            // which might perform better, because once shutdownNow() has been triggered on the
+            // syncExecutor (and thus this thread has already been interrupted), the channel
+            // methods would throw a ClosedByInterruptException at us no matter what we do.
+            fileForSyncing.getFD().sync();
+        } catch (IOException ex)
+        {
+            final String msg =
+                    (ex.getMessage() == null) ? ex.getClass().getSimpleName() : ex.getMessage();
+            throw new HDF5JavaException("Error syncing file: " + msg);
+        }
+    }
+
+    /**
+     * Closes and, depending on the sync mode, syncs the HDF5 file in the current thread.
+     * <p>
+     * To be called from the syncer thread only.
+     */
+    private void closeNow()
+    {
+        synchronized (fileRegistry)
+        {
+            if (state == State.OPEN)
+            {
+                flushExternals();
+                flushables.clear();
+                super.close();
+                if (SYNC_ON_CLOSE_MODES.contains(syncMode))
+                {
+                    syncNow();
+                }
+                closeSync();
+            }
+        }
+    }
+
+    private void closeSync()
+    {
+        try
+        {
+            fileForSyncing.close();
+        } catch (IOException ex)
+        {
+            throw new HDF5JavaException("Error closing file: " + ex.getMessage());
+        }
+    }
+
+    boolean addFlushable(Flushable flushable)
+    {
+        return flushables.add(flushable);
+    }
+
+    boolean removeFlushable(Flushable flushable)
+    {
+        return flushables.remove(flushable);
+    }
+
+    void flushExternals()
+    {
+        for (Flushable f : flushables)
+        {
+            try
+            {
+                f.flush();
+            } catch (Throwable ex)
+            {
+                if (f instanceof IErrorStrategy)
+                {
+                    ((IErrorStrategy) f).dealWithError(ex);
+                } else
+                {
+                    throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+                }
+            }
+        }
+    }
+
+    void flush()
+    {
+        synchronized (fileRegistry)
+        {
+            flushExternals();
+            h5.flushFile(fileId);
+            if (NON_BLOCKING_SYNC_MODES.contains(syncMode))
+            {
+                commandQueue.add(Command.SYNC);
+            } else if (BLOCKING_SYNC_MODES.contains(syncMode))
+            {
+                syncNow();
+            }
+        }
+    }
+
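+    // Behavior summary (informal): SYNC and SYNC_ON_FLUSH enqueue the fsync(2) on the
+    // syncer thread (non-blocking), while SYNC_BLOCK and SYNC_ON_FLUSH_BLOCK run syncNow()
+    // in the calling thread; all other modes flush the HDF5 library buffers only.
+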
+    void flushSyncBlocking()
+    {
+        synchronized (fileRegistry)
+        {
+            flushExternals();
+            h5.flushFile(fileId);
+            syncNow();
+        }
+    }
+
+    @Override
+    void close()
+    {
+        synchronized (fileRegistry)
+        {
+            if (state == State.OPEN)
+            {
+                flushExternals();
+                flushables.clear();
+                super.close();
+                if (SyncMode.SYNC == syncMode)
+                {
+                    commandQueue.add(Command.SYNC);
+                } else if (SyncMode.SYNC_BLOCK == syncMode)
+                {
+                    syncNow();
+                }
+
+                if (EnumSet.complementOf(NON_BLOCKING_SYNC_MODES).contains(syncMode))
+                {
+                    closeSync();
+                    commandQueue.add(Command.EXIT);
+                } else
+                {
+                    // End the syncer thread and avoid a race condition for non-blocking sync
+                    // modes, as the syncer thread may still want to use fileForSyncing.
+                    commandQueue.add(Command.CLOSE_SYNC);
+                }
+            }
+        }
+    }
+
+    void saveNonDefaultHouseKeepingNameSuffix()
+    {
+        // If it is empty, then there is nothing to save.
+        if ("".equals(houseKeepingNameSuffix))
+        {
+            return;
+        }
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            final int objectId = h5.openObject(fileId, "/", registry);
+                            setStringAttribute(objectId,
+                                    HDF5Utils.HOUSEKEEPING_NAME_SUFFIX_ATTRIBUTE_NAME,
+                                    houseKeepingNameSuffix, houseKeepingNameSuffix.length(), false,
+                                    registry);
+                            setIntAttributeAutoSize(objectId,
+                                    HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME,
+                                    houseKeepingNameSuffix.length(), registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        runner.call(addAttributeRunnable);
+    }
+
+    /**
+     * Saves the <var>value</var> as integer attribute <var>attributeName</var> of
+     * <var>objectId</var>, choosing the size of the integer type automatically based on the
+     * <var>value</var>.
+     * 
+     * @param objectId The id of the data set object in the file.
+     */
+    private void setIntAttributeAutoSize(final int objectId, final String attributeName,
+            final int value, ICleanUpRegistry registry)
+    {
+        if (value > Short.MAX_VALUE)
+        {
+            setAttribute(objectId, attributeName, H5T_STD_I32LE, H5T_NATIVE_INT32, -1, new int[]
+                { value }, registry);
+        } else if (value > Byte.MAX_VALUE)
+        {
+            setAttribute(objectId, attributeName, H5T_STD_I16LE, H5T_NATIVE_INT16, -1, new int[]
+                { value }, registry);
+        } else
+        {
+            setAttribute(objectId, attributeName, H5T_STD_I8LE, H5T_NATIVE_INT8, -1, new byte[]
+                { (byte) value }, registry);
+        }
+    }
+
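+    // Worked example (informal): a suffix length of 100 fits a byte and is written as
+    // H5T_STD_I8LE; 1000 exceeds Byte.MAX_VALUE and becomes H5T_STD_I16LE; 100000 exceeds
+    // Short.MAX_VALUE and becomes H5T_STD_I32LE.
+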
+    @Override
+    void commitDataType(final String dataTypePath, final int dataTypeId)
+    {
+        h5.commitDataType(fileId, dataTypePath, dataTypeId);
+    }
+
+    HDF5EnumerationType openOrCreateTypeVariantDataType(final HDF5Writer writer)
+    {
+        final String typeVariantTypePath = getTypeVariantDataTypePath(houseKeepingNameSuffix);
+        final HDF5EnumerationType dataType;
+        int dataTypeId = getDataTypeId(typeVariantTypePath);
+        if (dataTypeId < 0
+                || h5.getNumberOfMembers(dataTypeId) < HDF5DataTypeVariant.values().length)
+        {
+            final String typeVariantPath = findFirstUnusedTypeVariantPath(writer);
+            dataType = createTypeVariantDataType();
+            commitDataType(typeVariantPath, dataType.getStorageTypeId());
+            writer.createOrUpdateSoftLink(typeVariantPath.substring(getDataTypeGroup(
+                    houseKeepingNameSuffix).length() + 1), typeVariantTypePath);
+        } else
+        {
+            final int nativeDataTypeId = h5.getNativeDataType(dataTypeId, fileRegistry);
+            final String[] typeVariantNames = h5.getNamesForEnumOrCompoundMembers(dataTypeId);
+            dataType =
+                    new HDF5EnumerationType(fileId, dataTypeId, nativeDataTypeId,
+                            typeVariantTypePath, typeVariantNames, this);
+        }
+        return dataType;
+    }
+
+    void setEnumArrayAttribute(final String objectPath, final String name,
+            final HDF5EnumerationValueArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int baseMemoryTypeId = value.getType().getNativeTypeId();
+                    final int memoryTypeId =
+                            h5.createArrayType(baseMemoryTypeId, value.getLength(), registry);
+                    final int baseStorageTypeId = value.getType().getStorageTypeId();
+                    final int storageTypeId =
+                            h5.createArrayType(baseStorageTypeId, value.getLength(), registry);
+                    setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                            value.toStorageForm(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(setAttributeRunnable);
+    }
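+
+    /*
+     * Note on the method above: both the memory and the storage side wrap the enumeration
+     * base type in an HDF5 array type of length value.getLength(), so the whole array is
+     * written as a single attribute with a scalar data space (dataSpaceId == -1 in this
+     * code base) rather than as an n-element data space.
+     */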
+
+    void setEnumMDArrayAttribute(final String objectPath, final String name,
+            final HDF5EnumerationValueMDArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int baseMemoryTypeId = value.getType().getNativeTypeId();
+                    final int memoryTypeId =
+                            h5.createArrayType(baseMemoryTypeId, value.dimensions(), registry);
+                    final int baseStorageTypeId = value.getType().getStorageTypeId();
+                    final int storageTypeId =
+                            h5.createArrayType(baseStorageTypeId, value.dimensions(), registry);
+                    setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                            value.toStorageForm(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(setAttributeRunnable);
+    }
+
+    <T> void setCompoundArrayAttribute(final String objectPath, final String attributeName,
+            final HDF5CompoundType<T> type, final T[] data,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+        assert data != null;
+
+        checkOpen();
+        type.check(fileId);
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(), data);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    final int baseMemoryTypeId = type.getNativeTypeId();
+                    final int memoryTypeId =
+                            h5.createArrayType(baseMemoryTypeId, data.length, registry);
+                    final int baseStorageTypeId = type.getStorageTypeId();
+                    final int storageTypeId =
+                            h5.createArrayType(baseStorageTypeId, data.length, registry);
+                    setAttribute(objectPath, attributeName, storageTypeId, memoryTypeId, -1,
+                            byteArray, registry);
+                    h5.reclaimCompoundVL(type, byteArray);
+
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(setAttributeRunnable);
+    }
+
+    <T> void setCompoundMDArrayAttribute(final String objectPath, final String attributeName,
+            final HDF5CompoundType<T> type, final MDArray<T> data,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+        assert data != null;
+
+        checkOpen();
+        type.check(fileId);
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(),
+                                    data.getAsFlatArray());
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    final int baseMemoryTypeId = type.getNativeTypeId();
+                    final int memoryTypeId =
+                            h5.createArrayType(baseMemoryTypeId, data.dimensions(), registry);
+                    final int baseStorageTypeId = type.getStorageTypeId();
+                    final int storageTypeId =
+                            h5.createArrayType(baseStorageTypeId, data.dimensions(), registry);
+                    setAttribute(objectPath, attributeName, storageTypeId, memoryTypeId, -1,
+                            byteArray, registry);
+                    h5.reclaimCompoundVL(type, byteArray);
+
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(setAttributeRunnable);
+    }
+
+    private String findFirstUnusedTypeVariantPath(final HDF5Reader reader)
+    {
+        int number = 0;
+        String path;
+        do
+        {
+            path = getTypeVariantDataTypePath(houseKeepingNameSuffix) + "." + (number++);
+        } while (reader.exists(path, false) && number < MAX_TYPE_VARIANT_TYPES);
+        return path;
+    }
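+
+    /*
+     * Probing example (the exact names depend on the house-keeping suffix): with the
+     * default suffix this tries paths like "/__DATA_TYPES__/__TYPE_VARIANT__.0", then
+     * "...__TYPE_VARIANT__.1" and so on, stopping at the first path that does not yet
+     * exist in the file or when MAX_TYPE_VARIANT_TYPES is reached.
+     */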
+
+    /**
+     * Writes a scalar value provided as <code>byte[]</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final byte[] value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            keepDataSetIfExists, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>byte[]</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final byte[] value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, value);
+        return dataSetId;
+    }
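+
+    /*
+     * The flow above is shared by all scalar writers: an existing data set is deleted
+     * when it is not to be kept, re-opened when it is, and created (compact by default)
+     * when absent. A minimal usage sketch, with illustrative type ids and assuming the
+     * HDFNativeData helper from the bundled hdf-java library:
+     *
+     *   // overwrite any previous value at /answer with a little-endian 32-bit int
+     *   writeScalar("/answer", H5T_STD_I32LE, H5T_NATIVE_INT32,
+     *           HDFNativeData.intToByte(42), true, false, registry);
+     */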
+
+    /**
+     * Writes a scalar value provided as <code>byte</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final byte value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            true, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>byte</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final byte value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new byte[]
+            { value });
+        return dataSetId;
+    }
+
+    /**
+     * Writes a scalar value provided as <code>short</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final short value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            true, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>short</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final short value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new short[]
+            { value });
+        return dataSetId;
+    }
+
+    /**
+     * Writes a scalar value provided as <code>int</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final int value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            true, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>int</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final int value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new int[]
+            { value });
+        return dataSetId;
+    }
+
+    /**
+     * Writes a scalar value provided as <code>long</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final long value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            true, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>long</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final long value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new long[]
+            { value });
+        return dataSetId;
+    }
+
+    /**
+     * Writes a scalar value provided as <code>float</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final float value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            keepDataSetIfExists, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>float</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final float value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new float[]
+            { value });
+        return dataSetId;
+    }
+
+    /**
+     * Writes a scalar value provided as <code>double</code>.
+     */
+    void writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final double value)
+    {
+        assert dataSetPath != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    writeScalar(dataSetPath, storageDataTypeId, nativeDataTypeId, value, true,
+                            true, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        runner.call(writeScalarRunnable);
+    }
+
+    /**
+     * Internal method for writing a scalar value provided as <code>double</code>.
+     */
+    int writeScalar(final String dataSetPath, final int storageDataTypeId,
+            final int nativeDataTypeId, final double value, final boolean compactLayout,
+            final boolean keepDatasetIfExists, ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, dataSetPath);
+        if (exists && keepDatasetIfExists == false)
+        {
+            h5.deleteObject(fileId, dataSetPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId = h5.openObject(fileId, dataSetPath, registry);
+        } else
+        {
+            dataSetId =
+                    h5.createScalarDataSet(fileId, storageDataTypeId, dataSetPath, compactLayout,
+                            registry);
+        }
+        H5Dwrite(dataSetId, nativeDataTypeId, H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new double[]
+            { value });
+        return dataSetId;
+    }
+
+    /**
+     * Writes a variable-length string array data set.
+     */
+    void writeStringVL(int dataSetId, int memorySpaceId, int fileSpaceId, String[] value)
+    {
+        h5.writeStringVL(dataSetId, variableLengthStringDataTypeId, memorySpaceId, fileSpaceId,
+                value);
+    }
+
+    /**
+     * Writes a variable-length string array data set.
+     */
+    void writeStringVL(int dataSetId, String[] value)
+    {
+        h5.writeStringVL(dataSetId, variableLengthStringDataTypeId, value);
+    }
+
+    /**
+     * Writes a variable-length string array attribute.
+     */
+    void writeAttributeStringVL(int attributeId, String[] value)
+    {
+        h5.writeAttributeStringVL(attributeId, variableLengthStringDataTypeId, value);
+    }
+
+    /**
+     * Creates a data set.
+     */
+    int createDataSet(final String objectPath, final int storageDataTypeId,
+            final HDF5AbstractStorageFeatures features, final long[] dimensions,
+            final long[] chunkSizeOrNull, int elementLength, final ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        final boolean empty = isEmpty(dimensions);
+        final boolean chunkSizeProvided =
+                (chunkSizeOrNull != null && isNonPositive(chunkSizeOrNull) == false);
+        final long[] definitiveChunkSizeOrNull;
+        if (h5.exists(fileId, objectPath))
+        {
+            if (keepDataIfExists(features))
+            {
+                return h5.openDataSet(fileId, objectPath, registry);
+            }
+            h5.deleteObject(fileId, objectPath);
+        }
+        if (empty)
+        {
+            definitiveChunkSizeOrNull =
+                    chunkSizeProvided ? chunkSizeOrNull : HDF5Utils.tryGetChunkSize(dimensions,
+                            elementLength, features.requiresChunking(), true);
+        } else if (features.tryGetProposedLayout() == HDF5StorageLayout.COMPACT
+                || features.tryGetProposedLayout() == HDF5StorageLayout.CONTIGUOUS
+                || (useExtentableDataTypes == false) && features.requiresChunking() == false)
+        {
+            definitiveChunkSizeOrNull = null;
+        } else if (chunkSizeProvided)
+        {
+            definitiveChunkSizeOrNull = chunkSizeOrNull;
+        } else
+        {
+            definitiveChunkSizeOrNull =
+                    HDF5Utils
+                            .tryGetChunkSize(
+                                    dimensions,
+                                    elementLength,
+                                    features.requiresChunking(),
+                                    useExtentableDataTypes
+                                            || features.tryGetProposedLayout() == HDF5StorageLayout.CHUNKED);
+        }
+        final HDF5StorageLayout layout =
+                determineLayout(storageDataTypeId, dimensions, definitiveChunkSizeOrNull,
+                        features.tryGetProposedLayout());
+        dataSetId =
+                h5.createDataSet(fileId, dimensions, definitiveChunkSizeOrNull, storageDataTypeId,
+                        features, objectPath, layout, fileFormat, registry);
+        return dataSetId;
+    }
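+
+    /*
+     * Summary of the chunk-size decision above: an empty data set gets a chunk size
+     * derived from its dimensions so that it can be extended later; an explicit COMPACT
+     * or CONTIGUOUS layout (or a writer without extendable data types whose features do
+     * not require chunking) gets none; otherwise a caller-provided chunk size wins, with
+     * a computed one as the fallback.
+     */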
+
+    boolean keepDataIfExists(final HDF5AbstractStorageFeatures features)
+    {
+        switch (features.getDatasetReplacementPolicy())
+        {
+            case ENFORCE_KEEP_EXISTING:
+                return true;
+            case ENFORCE_REPLACE_WITH_NEW:
+                return false;
+            case USE_WRITER_DEFAULT:
+            default:
+                return keepDataSetIfExists;
+        }
+    }
+
+    /**
+     * Determines which {@link HDF5StorageLayout} to use for a data set of the given
+     * <var>storageDataTypeId</var> and <var>dimensions</var>, taking the chunk size and
+     * the proposed layout into account.
+     */
+    HDF5StorageLayout determineLayout(final int storageDataTypeId, final long[] dimensions,
+            final long[] chunkSizeOrNull, final HDF5StorageLayout proposedLayoutOrNull)
+    {
+        if (chunkSizeOrNull != null)
+        {
+            return HDF5StorageLayout.CHUNKED;
+        }
+        if (proposedLayoutOrNull != null)
+        {
+            return proposedLayoutOrNull;
+        }
+        if (computeSizeForDimensions(storageDataTypeId, dimensions) < HDF5BaseWriter.COMPACT_LAYOUT_THRESHOLD)
+        {
+            return HDF5StorageLayout.COMPACT;
+        }
+        return HDF5StorageLayout.CONTIGUOUS;
+    }
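+
+    /*
+     * Layout decision examples (assuming, for illustration, a compact-layout threshold
+     * of 256 bytes):
+     *
+     *   chunk size given                    -> CHUNKED (always wins)
+     *   no chunks, proposed layout COMPACT  -> COMPACT
+     *   no chunks, 8-byte type, dims {4}    -> COMPACT    (32 bytes < threshold)
+     *   no chunks, 8-byte type, dims {64}   -> CONTIGUOUS (512 bytes >= threshold)
+     */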
+
+    private int computeSizeForDimensions(int dataTypeId, long[] dimensions)
+    {
+        int size = h5.getDataTypeSize(dataTypeId);
+        for (long d : dimensions)
+        {
+            size *= d;
+        }
+        return size;
+    }
+
+    /**
+     * Checks whether the given <var>dimensions</var> are in bounds for <var>dataSetId</var>.
+     */
+    boolean areDimensionsInBounds(final int dataSetId, final long[] dimensions)
+    {
+        final long[] maxDimensions = h5.getDataMaxDimensions(dataSetId);
+
+        if (dimensions.length != maxDimensions.length) // Actually an error condition
+        {
+            return false;
+        }
+
+        for (int i = 0; i < dimensions.length; ++i)
+        {
+            if (maxDimensions[i] != H5S_UNLIMITED && dimensions[i] > maxDimensions[i])
+            {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Returns the data set id for the given <var>objectPath</var>. If the data set
+     * exists, the <code>features</code> and the status of
+     * <code>keepDataSetIfExists</code> determine whether the existing data set is opened
+     * or whether it is deleted and re-created.
+     */
+    int getOrCreateDataSetId(final String objectPath, final int storageDataTypeId,
+            final long[] dimensions, int elementLength, final HDF5AbstractStorageFeatures features,
+            ICleanUpRegistry registry)
+    {
+        final int dataSetId;
+        boolean exists = h5.exists(fileId, objectPath);
+        if (exists && keepDataIfExists(features) == false)
+        {
+            h5.deleteObject(fileId, objectPath);
+            exists = false;
+        }
+        if (exists)
+        {
+            dataSetId =
+                    h5.openAndExtendDataSet(fileId, objectPath, fileFormat, dimensions,
+                            storageDataTypeId, registry);
+        } else
+        {
+            dataSetId =
+                    createDataSet(objectPath, storageDataTypeId, features, dimensions, null,
+                            elementLength, registry);
+        }
+        return dataSetId;
+    }
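+
+    /*
+     * Note on the flow above: an existing data set that is to be kept is opened and, if
+     * it is chunked, extended to the requested dimensions where necessary; one that is to
+     * be replaced is deleted first and then created from scratch with the requested
+     * storage features.
+     */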
+
+    void setDataSetDimensions(final String objectPath, final long[] newDimensions,
+            ICleanUpRegistry registry)
+    {
+        assert newDimensions != null;
+
+        final int dataSetId = h5.openDataSet(fileId, objectPath, registry);
+        try
+        {
+            h5.setDataSetExtentChunked(dataSetId, newDimensions);
+        } catch (HDF5DatasetInterfaceException ex)
+        {
+            if (HDF5StorageLayout.CHUNKED != h5.getLayout(dataSetId, registry))
+            {
+                throw new HDF5JavaException("Cannot change dimensions of non-extendable data set.");
+            } else
+            {
+                throw ex;
+            }
+        }
+    }
+
+    //
+    // Attributes
+    //
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final byte[] value,
+            ICleanUpRegistry registry)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+    }
+
+    void setAttribute(final int objectId, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final byte[] value,
+            ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+        }
+        h5.writeAttribute(attributeId, nativeDataTypeId, value);
+    }
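+
+    /*
+     * The attribute is only deleted and recreated when its on-disk data type differs
+     * from the requested storage type; otherwise the existing attribute is overwritten
+     * in place. The short[], int[], long[], float[] and double[] overloads below follow
+     * the same pattern.
+     */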
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final short[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            setAttribute(objectPath, name, storageDataTypeId, nativeDataTypeId,
+                                    dataSpaceId, value, registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        runner.call(addAttributeRunnable);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final short[] value,
+            ICleanUpRegistry registry)
+    {
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+    }
+
+    void setAttribute(final int objectId, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final short[] value,
+            ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+        }
+        h5.writeAttribute(attributeId, nativeDataTypeId, value);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final int[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            setAttribute(objectPath, name, storageDataTypeId, nativeDataTypeId,
+                                    dataSpaceId, value, registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        runner.call(addAttributeRunnable);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final int[] value,
+            ICleanUpRegistry registry)
+    {
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+    }
+
+    void setAttribute(final int objectId, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final int[] value,
+            ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+        }
+        h5.writeAttribute(attributeId, nativeDataTypeId, value);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final long[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            setAttribute(objectPath, name, storageDataTypeId, nativeDataTypeId,
+                                    dataSpaceId, value, registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        runner.call(addAttributeRunnable);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final long[] value,
+            ICleanUpRegistry registry)
+    {
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+    }
+
+    void setAttribute(final String objectPath, final String name,
+            final HDF5DataTypeVariant typeVariant, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final long[] value,
+            ICleanUpRegistry registry)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+        setTypeVariant(objectId, name, (dataSpaceId != -1), typeVariant, registry);
+    }
+
+    void setAttribute(final int objectId, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final long[] value,
+            ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+        }
+        h5.writeAttribute(attributeId, nativeDataTypeId, value);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final float[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            setAttribute(objectPath, name, storageDataTypeId, nativeDataTypeId,
+                                    dataSpaceId, value, registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        runner.call(addAttributeRunnable);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final float[] value,
+            ICleanUpRegistry registry)
+    {
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+    }
+
+    void setAttribute(final int objectId, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final float[] value,
+            ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+        }
+        h5.writeAttribute(attributeId, nativeDataTypeId, value);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final double[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert storageDataTypeId >= 0;
+        assert nativeDataTypeId >= 0;
+        assert value != null;
+
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            setAttribute(objectPath, name, storageDataTypeId, nativeDataTypeId,
+                                    dataSpaceId, value, registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        runner.call(addAttributeRunnable);
+    }
+
+    void setAttribute(final String objectPath, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final double[] value,
+            ICleanUpRegistry registry)
+    {
+        final int objectId = h5.openObject(fileId, objectPath, registry);
+        setAttribute(objectId, name, storageDataTypeId, nativeDataTypeId, dataSpaceId, value,
+                registry);
+    }
+
+    void setAttribute(final int objectId, final String name, final int storageDataTypeId,
+            final int nativeDataTypeId, final int dataSpaceId, final double[] value,
+            ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, storageDataTypeId, dataSpaceId, registry);
+        }
+        h5.writeAttribute(attributeId, nativeDataTypeId, value);
+    }
+
+    void setTypeVariant(final int objectId, final HDF5DataTypeVariant typeVariant,
+            ICleanUpRegistry registry)
+    {
+        setAttribute(objectId, createObjectTypeVariantAttributeName(houseKeepingNameSuffix),
+                typeVariantDataType.getStorageTypeId(), typeVariantDataType.getNativeTypeId(), -1,
+                typeVariantDataType.getEnumType().toStorageForm(typeVariant.ordinal()), registry);
+    }
+
+    void setTypeVariant(final int objectId, final String attributeName,
+            final boolean enforceSimpleDataSpace, final HDF5DataTypeVariant typeVariant,
+            ICleanUpRegistry registry)
+    {
+        final int dataSpaceId = enforceSimpleDataSpace ? h5.createSimpleDataSpace(new long[]
+            { 1 }, registry) : -1;
+        setAttribute(objectId,
+                createAttributeTypeVariantAttributeName(attributeName, houseKeepingNameSuffix),
+                typeVariantDataType.getStorageTypeId(), typeVariantDataType.getNativeTypeId(),
+                dataSpaceId,
+                typeVariantDataType.getEnumType().toStorageForm(typeVariant.ordinal()), registry);
+    }
+
+    void setStringAttribute(final int objectId, final String name, final String value,
+            final int maxLength, final boolean lengthFitsValue, ICleanUpRegistry registry)
+    {
+        final byte[] bytes;
+        final int realMaxLengthInBytes;
+        if (lengthFitsValue)
+        {
+            bytes = StringUtils.toBytes(value, encodingForNewDataSets);
+            realMaxLengthInBytes = (bytes.length == 0) ? 1 : bytes.length;
+        } else
+        {
+            bytes = StringUtils.toBytes(value, maxLength, encodingForNewDataSets);
+            realMaxLengthInBytes =
+                    encodingForNewDataSets.getMaxBytesPerChar()
+                            * ((maxLength == 0) ? 1 : maxLength);
+        }
+        final int storageDataTypeId = h5.createDataTypeString(realMaxLengthInBytes, registry);
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId = h5.createAttribute(objectId, name, storageDataTypeId, -1, registry);
+            }
+        } else
+        {
+            attributeId = h5.createAttribute(objectId, name, storageDataTypeId, -1, registry);
+        }
+        h5.writeAttribute(attributeId, storageDataTypeId,
+                StringUtils.cutOrPadBytes(bytes, realMaxLengthInBytes));
+    }
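+
+    /*
+     * Length computation example (a sketch, assuming an encoding with
+     * getMaxBytesPerChar() == 4, e.g. a UTF-8 setting): with lengthFitsValue == true the
+     * value "abc" is stored in a 3-byte fixed-length string type; with
+     * lengthFitsValue == false and maxLength == 5 the type is 4 * 5 == 20 bytes wide and
+     * the value is padded (or cut) to that size.
+     */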
+
+    class StringArrayBuffer
+    {
+        private byte[] buf;
+
+        private int len;
+
+        private int realMaxLengthPerString;
+
+        private boolean valueContainsChar0;
+
+        private int[] lengths;
+
+        private final int maxLengthPerString;
+
+        private final boolean lengthFitsValue;
+
+        StringArrayBuffer(int maxLengthPerString, boolean lengthFitsValue)
+        {
+            this.maxLengthPerString = maxLengthPerString;
+            this.lengthFitsValue = lengthFitsValue;
+        }
+
+        void addAll(String[] array)
+        {
+            if (lengthFitsValue)
+            {
+                addAllLengthFitsValue(array);
+            } else
+            {
+                addAllLengthFixedLength(array);
+            }
+        }
+
+        private void addAllLengthFixedLength(String[] array)
+        {
+            this.realMaxLengthPerString =
+                    encodingForNewDataSets.getMaxBytesPerChar() * maxLengthPerString;
+            this.buf = new byte[realMaxLengthPerString * array.length];
+            this.lengths = new int[array.length];
+            int idx = 0;
+            for (String s : array)
+            {
+                final byte[] data =
+                        StringUtils.toBytes(s, maxLengthPerString, encodingForNewDataSets);
+                final int dataLen = Math.min(data.length, realMaxLengthPerString);
+                final int newLen = len + realMaxLengthPerString;
+                System.arraycopy(data, 0, buf, len, dataLen);
+                len = newLen;
+                if (valueContainsChar0 == false)
+                {
+                    valueContainsChar0 |= s.contains("\0");
+                }
+                lengths[idx++] = dataLen;
+            }
+        }
+
+        private void addAllLengthFitsValue(String[] array)
+        {
+            final byte[][] data = new byte[array.length][];
+            this.lengths = new int[array.length];
+            int idx = 0;
+            for (String s : array)
+            {
+                final byte[] bytes = StringUtils.toBytes(s, encodingForNewDataSets);
+                realMaxLengthPerString = Math.max(realMaxLengthPerString, bytes.length);
+                data[idx] = bytes;
+                lengths[idx] = bytes.length;
+                if (valueContainsChar0 == false)
+                {
+                    valueContainsChar0 |= s.contains("\0");
+                }
+                ++idx;
+            }
+            this.buf = new byte[realMaxLengthPerString * array.length];
+            for (byte[] bytes : data)
+            {
+                System.arraycopy(bytes, 0, buf, len, bytes.length);
+                len = len + realMaxLengthPerString;
+            }
+        }
+
+        byte[] toArray()
+        {
+            return StringUtils.cutOrPadBytes(buf, len);
+        }
+
+        int getMaxLengthInByte()
+        {
+            return (realMaxLengthPerString == 0) ? 1 : realMaxLengthPerString;
+        }
+
+        boolean shouldSaveExplicitLength()
+        {
+            return valueContainsChar0 || (realMaxLengthPerString == 0);
+        }
+
+        int[] getLengths()
+        {
+            return lengths;
+        }
+    }
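+
+    /*
+     * Packing example for StringArrayBuffer (a sketch, assuming a single-byte encoding):
+     * for {"ab", "wxyz"} with lengthFitsValue == true, realMaxLengthPerString becomes 4
+     * and the buffer holds 'a','b',0,0,'w','x','y','z' with lengths == {2, 4}. With a
+     * fixed maxLengthPerString of 3 instead, each slot is 3 bytes wide and "wxyz" is cut
+     * to "wxy", giving lengths == {2, 3}.
+     */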
+
+    void setStringArrayAttribute(final int objectId, final String name, final String[] value,
+            final int maxLength, final boolean lengthFitsValue, ICleanUpRegistry registry)
+    {
+        final StringArrayBuffer array = new StringArrayBuffer(maxLength, lengthFitsValue);
+        array.addAll(value);
+        final byte[] arrData = array.toArray();
+        final int stringDataTypeId = h5.createDataTypeString(array.getMaxLengthInByte(), registry);
+        final int storageDataTypeId = h5.createArrayType(stringDataTypeId, value.length, registry);
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId = h5.createAttribute(objectId, name, storageDataTypeId, -1, registry);
+            }
+        } else
+        {
+            attributeId = h5.createAttribute(objectId, name, storageDataTypeId, -1, registry);
+        }
+        h5.writeAttribute(attributeId, storageDataTypeId, arrData);
+    }
+
+    void setStringArrayAttribute(final int objectId, final String name,
+            final MDArray<String> value, final int maxLength, final boolean lengthFitsValue,
+            ICleanUpRegistry registry)
+    {
+        final StringArrayBuffer array = new StringArrayBuffer(maxLength, lengthFitsValue);
+        array.addAll(value.getAsFlatArray());
+        final byte[] arrData = array.toArray();
+        final int stringDataTypeId = h5.createDataTypeString(array.getMaxLengthInByte(), registry);
+        final int storageDataTypeId =
+                h5.createArrayType(stringDataTypeId, value.dimensions(), registry);
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, storageDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId = h5.createAttribute(objectId, name, storageDataTypeId, -1, registry);
+            }
+        } else
+        {
+            attributeId = h5.createAttribute(objectId, name, storageDataTypeId, -1, registry);
+        }
+        h5.writeAttribute(attributeId, storageDataTypeId, arrData);
+    }
+
+    void setStringAttributeVariableLength(final int objectId, final String name,
+            final String value, ICleanUpRegistry registry)
+    {
+        int attributeId;
+        if (h5.existsAttribute(objectId, name))
+        {
+            attributeId = h5.openAttribute(objectId, name, registry);
+            final int oldStorageDataTypeId = h5.getDataTypeForAttribute(attributeId, registry);
+            if (h5.dataTypesAreEqual(oldStorageDataTypeId, variableLengthStringDataTypeId) == false)
+            {
+                h5.deleteAttribute(objectId, name);
+                attributeId =
+                        h5.createAttribute(objectId, name, variableLengthStringDataTypeId, -1,
+                                registry);
+            }
+        } else
+        {
+            attributeId =
+                    h5.createAttribute(objectId, name, variableLengthStringDataTypeId, -1, registry);
+        }
+        writeAttributeStringVL(attributeId, new String[]
+            { value });
+    }
+
+    String moveLinkOutOfTheWay(String linkPath)
+    {
+        final String newLinkPath = createNonExistentReplacementLinkPath(linkPath);
+        h5.moveLink(fileId, linkPath, newLinkPath);
+        return newLinkPath;
+    }
+
+    private String createNonExistentReplacementLinkPath(final String dataTypePath)
+    {
+        final String dstLinkPath = dataTypePath + "__REPLACED_";
+        int idx = 1;
+        while (h5.exists(fileId, dstLinkPath + idx))
+        {
+            ++idx;
+        }
+        return dstLinkPath + idx;
+    }
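+
+    /*
+     * Example: if "/myType" is occupied, moveLinkOutOfTheWay("/myType") renames the link
+     * to "/myType__REPLACED_1" (or "__REPLACED_2" and so on, if earlier replacements
+     * exist) and returns the new path.
+     */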
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5BooleanReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5BooleanReader.java
new file mode 100644
index 0000000..13ca99c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5BooleanReader.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_B64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT64;
+
+import java.util.BitSet;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Implementation of {@link IHDF5BooleanReader}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5BooleanReader implements IHDF5BooleanReader
+{
+
+    private final HDF5BaseReader baseReader;
+
+    HDF5BooleanReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public boolean getAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Boolean> readRunnable = new ICallableWithCleanUp<Boolean>()
+            {
+                @Override
+                public Boolean call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataTypeForAttribute(attributeId, registry);
+                    byte[] data =
+                            baseReader.h5
+                                    .readAttributeAsByteArray(attributeId, nativeDataTypeId, 1);
+                    final Boolean value =
+                            baseReader.h5.tryGetBooleanValue(nativeDataTypeId, data[0]);
+                    if (value == null)
+                    {
+                        throw new HDF5JavaException("Attribute " + attributeName + " of path "
+                                + objectPath + " needs to be a Boolean.");
+                    }
+                    return value;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public boolean read(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Boolean> readRunnable = new ICallableWithCleanUp<Boolean>()
+            {
+                @Override
+                public Boolean call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    final byte[] data = new byte[1];
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, data);
+                    final Boolean value =
+                            baseReader.h5.tryGetBooleanValue(nativeDataTypeId, data[0]);
+                    if (value == null)
+                    {
+                        throw new HDF5JavaException(objectPath + " needs to be a Boolean.");
+                    }
+                    return value;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public BitSet readBitField(final String objectPath) throws HDF5DatatypeInterfaceException
+    {
+        baseReader.checkOpen();
+        return BitSetConversionUtils.fromStorageForm(readBitFieldStorageForm(objectPath));
+    }
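+
+    /*
+     * The storage form read below is the sequence of 64-bit words written by the
+     * corresponding bit field writer; fromStorageForm() maps bit j of word i to bit
+     * (i * 64 + j) of the resulting BitSet.
+     */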
+
+    private long[] readBitFieldStorageForm(final String objectPath)
+    {
+        assert objectPath != null;
+
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, registry);
+                    final long[] data = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_B64, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public BitSet readBitFieldBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return readBitFieldBlockWithOffset(objectPath, blockSize, blockSize * blockNumber);
+    }
+
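+    // Block-wise variant: reads blockSize 64-bit words starting at the given word offset.
+    // Returns null if nullWhenOutside is set and the block lies outside the data set.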
+    private long[] readBitFieldStorageForm(final String objectPath, final int blockSize,
+            final long offset, final boolean nullWhenOutside)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParamsOrNull =
+                            baseReader.tryGetSpaceParameters(dataSetId, offset, blockSize,
+                                    nullWhenOutside, registry);
+                    if (spaceParamsOrNull == null)
+                    {
+                        return null;
+                    }
+                    final long[] data = new long[spaceParamsOrNull.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_B64,
+                            spaceParamsOrNull.memorySpaceId, spaceParamsOrNull.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public BitSet readBitFieldBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        baseReader.checkOpen();
+        return BitSetConversionUtils.fromStorageForm(readBitFieldStorageForm(objectPath, blockSize,
+                offset, false));
+    }
+
+    @Override
+    public boolean isBitSet(String objectPath, int bitIndex)
+    {
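+        // Read only the single 64-bit word that contains the requested bit.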
+        final int wordIndex = BitSetConversionUtils.getWordIndex(bitIndex);
+        final long[] storageFormOrNull = readBitFieldStorageForm(objectPath, 1, wordIndex, true);
+        // If the bitIndex is outside the bit field, we return false.
+        if (storageFormOrNull == null)
+        {
+            return false;
+        }
+        final long word = storageFormOrNull[0];
+        return (word & BitSetConversionUtils.getBitMaskInWord(bitIndex)) != 0;
+    }
+
+    @Override
+    public BitSet[] readBitFieldArray(String objectPath)
+    {
+        baseReader.checkOpen();
+        return BitSetConversionUtils.fromStorageForm2D(readBitFieldArrayStorageForm(objectPath));
+    }
+
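+    // An array of bit fields is stored as a rank-2 data set of 64-bit words: dimension 0
+    // indexes the words within one bit field, dimension 1 the bit fields themselves.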
+    private MDLongArray readBitFieldArrayStorageForm(final String objectPath)
+    {
+        assert objectPath != null;
+
+        final ICallableWithCleanUp<MDLongArray> readCallable =
+                new ICallableWithCleanUp<MDLongArray>()
+                    {
+                        @Override
+                        public MDLongArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, registry);
+                            checkDimensions2D(spaceParams.dimensions);
+                            final long[] data = new long[spaceParams.blockSize];
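+                            // A scaled bit field is stored as unsigned integers,
+                            // a plain one as a 64-bit bit field type.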
+                            if (baseReader.isScaledBitField(dataSetId, registry))
+                            {
+                                baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64,
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            } else
+                            {
+                                baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_B64,
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            }
+                            return new MDLongArray(data, spaceParams.dimensions);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public BitSet[] readBitFieldArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        baseReader.checkOpen();
+        return BitSetConversionUtils.fromStorageForm2D(readBitFieldBlockStorageForm2D(objectPath,
+                blockSize, offset, true));
+    }
+
+    private MDLongArray readBitFieldBlockStorageForm2D(final String objectPath, final int blockSize,
+            final long offset, final boolean nullWhenOutside)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+            {
+                @Override
+                public MDLongArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final long[] dimensions = baseReader.h5.getDataDimensions(dataSetId, registry);
+                    checkDimensions2D(dimensions);
+                    final int numberOfWords = dimToInt(dimensions[0]);
+                    final int[] blockDimensions = new int[]
+                            { numberOfWords, blockSize };
+                    final DataSpaceParameters spaceParamsOrNull =
+                            baseReader.tryGetSpaceParameters(dataSetId, new long[]
+                                { 0, offset }, blockDimensions, nullWhenOutside, registry);
+                    if (spaceParamsOrNull == null)
+                    {
+                        return null;
+                    }
+                    final long[] data = new long[spaceParamsOrNull.blockSize];
+                    if (baseReader.isScaledBitField(dataSetId, registry))
+                    {
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64,
+                                spaceParamsOrNull.memorySpaceId, spaceParamsOrNull.dataSpaceId, data);
+                    } else
+                    {
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_B64,
+                                spaceParamsOrNull.memorySpaceId, spaceParamsOrNull.dataSpaceId, data);
+                    }
+                    return new MDLongArray(data, blockDimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private static void checkDimensions2D(final long[] dimensions)
+    {
+        if (dimensions.length != 2)
+        {
+            throw new HDF5JavaException(
+                    "Array is supposed to be of rank 2, but is of rank "
+                            + dimensions.length);
+        }
+    }
+    
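+    // Narrows a long dimension to int, throwing if the value does not fit.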
+    static int dimToInt(long longNumber)
+    {
+        final int intNumber = (int) longNumber;
+        if (intNumber != longNumber)
+        {
+            throw new HDF5JavaException("Dimension " + longNumber + " out of bounds.");
+        }
+        return intNumber;
+    }
+
+    @Override
+    public BitSet[] readBitFieldArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return readBitFieldArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5BooleanWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5BooleanWriter.java
new file mode 100644
index 0000000..e8ac9d8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5BooleanWriter.java
@@ -0,0 +1,446 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_B64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_B64LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;
+
+import java.util.BitSet;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * Implementation of {@link IHDF5BooleanWriter}.
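+ * <p>
+ * A minimal usage sketch (assuming the {@code HDF5Factory} convenience entry point and the
+ * {@code bool()} accessor of this library version):
+ * 
+ * <pre>
+ * IHDF5Writer writer = HDF5Factory.open("test.h5");
+ * writer.bool().write("/flag", true);
+ * writer.bool().writeBitField("/bits", new java.util.BitSet());
+ * writer.close();
+ * </pre>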
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5BooleanWriter extends HDF5BooleanReader implements IHDF5BooleanWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5BooleanWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final boolean value)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
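+                            // A boolean is stored as a single byte of the writer's boolean
+                            // data type, either with an explicit data space of length 1 or
+                            // without one (-1).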
+                            final byte byteValue = (byte) (value ? 1 : 0);
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, baseWriter.booleanDataTypeId,
+                                        baseWriter.booleanDataTypeId, dataSpaceId, new byte[]
+                                            { byteValue }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, baseWriter.booleanDataTypeId,
+                                        baseWriter.booleanDataTypeId, -1, new byte[]
+                                            { byteValue }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final boolean value)
+    {
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, baseWriter.booleanDataTypeId,
+                baseWriter.booleanDataTypeId, HDFNativeData.byteToByte((byte) (value ? 1 : 0)));
+    }
+
+    @Override
+    public void writeBitField(final String objectPath, final BitSet data)
+    {
+        writeBitField(objectPath, data, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeBitField(final String objectPath, final BitSet data,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int longBits = longBytes * 8;
+                    final int msb = data.length();
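+                    // Number of 64-bit words needed to hold all set bits (ceiling division).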
+                    final int realLength = msb / longBits + (msb % longBits != 0 ? 1 : 0);
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_B64LE, new long[]
+                                { realLength }, longBytes, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_B64, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            BitSetConversionUtils.toStorageForm(data));
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createBitField(String objectPath, int size)
+    {
+        createBitField(objectPath, size, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createBitField(String objectPath, long size, int blockSize)
+    {
+        createBitField(objectPath, size, blockSize, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createBitField(final String objectPath, final int size,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_B64LE, features, new long[]
+                            { 0 }, new long[]
+                            { size }, 8, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_B64LE, features, new long[]
+                            { size }, null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createBitField(final String objectPath, final long size, final int blockSize,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_B64LE, features, new long[]
+                        { size }, new long[]
+                        { blockSize }, 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeBitFieldBlock(String objectPath, BitSet data, int dataSize, long blockNumber)
+    {
+        writeBitFieldBlockWithOffset(objectPath, data, dataSize, dataSize * blockNumber);
+    }
+
+    @Override
+    public void writeBitFieldBlockWithOffset(final String objectPath, final BitSet data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
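+                    // Extend the data set (if necessary) so that it covers offset + dataSize
+                    // words, then write the block as a hyperslab of the file data space.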
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_B64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            BitSetConversionUtils.toStorageForm(data, dataSize));
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeBitFieldArray(final String objectPath, final BitSet[] data)
+    {
+        writeBitFieldArray(objectPath, data, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeBitFieldArray(final String objectPath, final BitSet[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int longBits = longBytes * 8;
+                    final int msb = BitSetConversionUtils.getMaxLength(data);
+                    final int numberOfWords = msb / longBits + (msb % longBits != 0 ? 1 : 0);
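+                    // If scaling is allowed and the bit fields fit into fewer than 64 bits,
+                    // store them as scaled unsigned integers and tag the data set as a bit
+                    // field; otherwise fall back to the plain 64-bit bit field type.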
+                    if (features.isScaling() && msb < longBits)
+                    {
+                        features.checkScalingOK(baseWriter.fileFormat);
+                        final HDF5IntStorageFeatures actualFeatures =
+                                HDF5IntStorageFeatures.build(features).scalingFactor((byte) msb)
+                                        .features();
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_U64LE,
+                                        new long[]
+                                            { numberOfWords, data.length }, longBytes,
+                                        actualFeatures, registry);
+                        H5Dwrite(dataSetId, H5T_NATIVE_UINT64, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                                BitSetConversionUtils.toStorageForm(data, numberOfWords));
+                        baseWriter
+                                .setTypeVariant(dataSetId, HDF5DataTypeVariant.BITFIELD, registry);
+                    } else
+                    {
+                        final HDF5IntStorageFeatures actualFeatures =
+                                HDF5IntStorageFeatures.build(features).noScaling().features();
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_B64LE,
+                                        new long[]
+                                            { numberOfWords, data.length }, longBytes,
+                                        actualFeatures, registry);
+                        H5Dwrite(dataSetId, H5T_NATIVE_B64, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                                BitSetConversionUtils.toStorageForm(data, numberOfWords));
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createBitFieldArray(final String objectPath, final int bitFieldSize,
+            final long arraySize, final long arrayBlockSize, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int longBits = longBytes * 8;
+                    final int numberOfWords =
+                            bitFieldSize / longBits + (bitFieldSize % longBits != 0 ? 1 : 0);
+                    if (features.requiresChunking() || arraySize > 0)
+                    {
+                        create(objectPath, new long[]
+                            { numberOfWords, arraySize }, new long[]
+                            { numberOfWords, arrayBlockSize }, features, registry);
+                    } else
+                    {
+                        create(objectPath, new long[]
+                                { numberOfWords, arrayBlockSize }, null, features, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+
+                @SuppressWarnings("hiding")
+                void create(final String objectPath, final long[] dimensions,
+                        final long[] blockDimensionsOrNull, final HDF5IntStorageFeatures features,
+                        ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int longBits = longBytes * 8;
+                    if (features.isScaling() && bitFieldSize < longBits)
+                    {
+                        features.checkScalingOK(baseWriter.fileFormat);
+                        final HDF5IntStorageFeatures actualFeatures =
+                                HDF5IntStorageFeatures.build(features)
+                                        .scalingFactor((byte) bitFieldSize).features();
+                        final int dataSetId =
+                                baseWriter.createDataSet(objectPath, H5T_STD_U64LE, actualFeatures,
+                                        dimensions, blockDimensionsOrNull, longBytes, registry);
+                        baseWriter
+                                .setTypeVariant(dataSetId, HDF5DataTypeVariant.BITFIELD, registry);
+                    } else
+                    {
+                        final HDF5IntStorageFeatures actualFeatures =
+                                HDF5IntStorageFeatures.build(features).noScaling().features();
+                        baseWriter.createDataSet(objectPath, H5T_STD_B64LE, actualFeatures,
+                                dimensions, blockDimensionsOrNull, longBytes, registry);
+                    }
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arrayBlockSize,
+            HDF5IntStorageFeatures features)
+    {
+        createBitFieldArray(objectPath, bitFieldSize, 0, arrayBlockSize, features);
+    }
+
+    @Override
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arraySize,
+            long arrayBlockSize)
+    {
+        createBitFieldArray(objectPath, bitFieldSize, arraySize, arrayBlockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arrayBlockSize)
+    {
+        createBitFieldArray(objectPath, bitFieldSize, 0, arrayBlockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeBitFieldArrayBlockWithOffset(final String objectPath, final BitSet[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { -1, offset + dataSize }, -1, registry);
+                    final long[] dimensions = baseWriter.h5.getDataDimensions(dataSetId, registry);
+                    if (dimensions.length != 2)
+                    {
+                        throw new HDF5JavaException(
+                                "Array is supposed to be of rank 2, but is of rank "
+                                        + dimensions.length);
+                    }
+                    final int numberOfWords = dimToInt(dimensions[0]);
+                    final long[] blockDimensions = new long[]
+                        { numberOfWords, dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { 0, offset };
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+
+                    if (baseWriter.isScaledBitField(dataSetId, registry))
+                    {
+                        H5Dwrite(dataSetId, H5T_NATIVE_UINT64, memorySpaceId, dataSpaceId,
+                                H5P_DEFAULT,
+                                BitSetConversionUtils.toStorageForm(data, numberOfWords));
+                    } else
+                    {
+                        H5Dwrite(dataSetId, H5T_NATIVE_B64, memorySpaceId, dataSpaceId,
+                                H5P_DEFAULT,
+                                BitSetConversionUtils.toStorageForm(data, numberOfWords));
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeBitFieldArrayBlockWithOffset(String objectPath, BitSet[] data, long offset)
+    {
+        writeBitFieldArrayBlockWithOffset(objectPath, data, data.length, offset);
+    }
+
+    @Override
+    public void writeBitFieldArrayBlock(String objectPath, BitSet[] data, int dataSize,
+            long blockNumber)
+    {
+        writeBitFieldArrayBlockWithOffset(objectPath, data, dataSize, dataSize * blockNumber);
+    }
+
+    @Override
+    public void writeBitFieldArrayBlock(String objectPath, BitSet[] data, long blockNumber)
+    {
+        writeBitFieldArrayBlock(objectPath, data, data.length, blockNumber);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ByteReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ByteReader.java
new file mode 100644
index 0000000..e20d123
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ByteReader.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT8;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5ByteReader}.
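+ * <p>
+ * A minimal usage sketch (assuming the {@code HDF5Factory} convenience entry point and the
+ * {@code int8()} accessor of this library version):
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5Factory.openForReading("test.h5");
+ * byte[] data = reader.int8().readArray("/data");
+ * reader.close();
+ * </pre>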
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5ByteReader implements IHDF5ByteReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5ByteReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public byte getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Byte> getAttributeRunnable = new ICallableWithCleanUp<Byte>()
+            {
+                @Override
+                public Byte call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final byte[] data =
+                            baseReader.h5.readAttributeAsByteArray(attributeId, H5T_NATIVE_INT8, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public byte[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> getAttributeRunnable =
+                new ICallableWithCleanUp<byte[]>()
+                    {
+                        @Override
+                        public byte[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getByteArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDByteArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDByteArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDByteArray>()
+                    {
+                        @Override
+                        public MDByteArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getByteMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public byte[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDByteArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public byte read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Byte> readCallable = new ICallableWithCleanUp<Byte>()
+            {
+                @Override
+                public Byte call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final byte[] data = new byte[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT8, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readByteArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private byte[] readByteArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final byte[] data = new byte[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT8, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readByteArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
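+    // Fallback for data sets of HDF5 array type: read the whole array through a scalar
+    // data space and a matching in-memory array type.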
+    private byte[] readByteArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final byte[] data = new byte[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT8, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDByteArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT8, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDByteArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT8, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public byte[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final byte[] data = new byte[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT8, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDByteArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public byte[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDByteArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public byte[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDByteArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
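+    // Reads a slice of the data set: the indices fixed in boundIndices are bound to the
+    // given values, all free indices are read in full, and the result has the rank of the
+    // free indices (or rank 1 with a single element if no index is free).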
+    @Override
+    public MDByteArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDByteArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDByteArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDByteArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDByteArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDByteArray> readCallable = new ICallableWithCleanUp<MDByteArray>()
+            {
+                @Override
+                public MDByteArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readByteMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDByteArray readByteMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final byte[] data = new byte[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT8, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDByteArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readByteMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDByteArray readByteMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT8, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final byte[] data = new byte[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDByteArray(data, arrayDimensions);
+        } else
+        {
+            final byte[] data =
+                    new byte[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDByteArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDByteArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDByteArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDByteArray> readCallable = new ICallableWithCleanUp<MDByteArray>()
+            {
+                @Override
+                public MDByteArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final byte[] dataBlock = new byte[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT8,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDByteArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDByteArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // We do not support block-wise reading of array types; check that we do not
+        // need to, and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final byte[] dataBlock =
+                new byte[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT8, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDByteArray(dataBlock, effectiveBlockDimensions);
+    }
+
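+    // Iterates over the data set in its natural blocks (for chunked data sets this is
+    // typically one chunk per block), yielding each block's data, index and offset.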
+    @Override
+    public Iterable<HDF5DataBlock<byte[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<byte[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<byte[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<byte[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<byte[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final byte[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<byte[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDByteArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDByteArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDByteArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDByteArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDByteArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDByteArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDByteArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    byte[] getByteArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_INT8, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_INT8;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final byte[] data =
+                baseReader.h5.readAttributeAsByteArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDByteArray getByteMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_INT8,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_INT8;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final byte[] data =
+                    baseReader.h5.readAttributeAsByteArray(attributeId,
+                            memoryTypeId, len);
+            return new MDByteArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
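A minimal usage sketch for the block-wise read API above (illustrative only:
how the IHDF5ByteReader instance is obtained, and the accessor names on
HDF5DataBlock, are assumptions and not part of this diff):

    import ch.systemsx.cisd.hdf5.HDF5DataBlock;
    import ch.systemsx.cisd.hdf5.IHDF5ByteReader;

    class NaturalBlockSketch
    {
        // Iterates the natural (chunk-aligned) blocks of a 1D byte data set
        // at the hypothetical path "/myDataSet".
        static void dump(IHDF5ByteReader byteReader)
        {
            for (HDF5DataBlock<byte[]> block : byteReader.getArrayNaturalBlocks("/myDataSet"))
            {
                System.out.println("block " + block.getIndex() + " at offset "
                        + block.getOffset() + ": " + block.getData().length + " bytes");
            }
        }
    }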
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ByteWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ByteWriter.java
new file mode 100644
index 0000000..9ca830e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ByteWriter.java
@@ -0,0 +1,703 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT8;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I8LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U8LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5ByteWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5ByteWriter extends HDF5ByteReader implements IHDF5ByteWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5ByteWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final byte value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I8LE,
+                                        H5T_NATIVE_INT8, dataSpaceId, new byte[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I8LE,
+                                        H5T_NATIVE_INT8, -1, new byte[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final byte[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I8LE, H5T_NATIVE_INT8,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT8, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I8LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDByteArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I8LE, H5T_NATIVE_INT8,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT8, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I8LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final byte[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDByteArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final byte value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_I8LE, H5T_NATIVE_INT8, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final byte[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final byte[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, new long[]
+                                { data.length }, 1, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT8, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                            features, new long[] { 0 }, new long[] { size }, 1, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                            features, new long[] { size }, null, 1, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 1, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final byte[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final byte[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT8, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final byte[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final byte[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDByteArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final byte[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDByteArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final byte[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final byte[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDByteArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDByteArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDByteArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDByteArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                                    data.longDimensions(), 1, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT8, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 1, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                                features, MDArray.toLong(dimensions), null, 1, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 1, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDByteArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDByteArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDByteArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDByteArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT8, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDByteArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT8, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
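A companion sketch for the writer above (illustrative; how the IHDF5ByteWriter
instance is obtained is not shown in this diff): create an empty, extendable
array and fill it block by block.

    import java.util.Arrays;
    import ch.systemsx.cisd.hdf5.IHDF5ByteWriter;

    class BlockWriteSketch
    {
        static void fill(IHDF5ByteWriter byteWriter)
        {
            final int blockSize = 1024;
            // Size 0 plus a block size yields a chunked, extendable data set;
            // writeArrayBlock() extends it as blocks arrive.
            byteWriter.createArray("/blocks", 0L, blockSize);
            final byte[] buffer = new byte[blockSize];
            for (long blockNumber = 0; blockNumber < 4; ++blockNumber)
            {
                Arrays.fill(buffer, (byte) blockNumber);
                // The offset is data.length * blockNumber (see writeArrayBlock above).
                byteWriter.writeArrayBlock("/blocks", buffer, blockNumber);
            }
        }
    }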
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CommonInformation.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CommonInformation.java
new file mode 100644
index 0000000..2e25332
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CommonInformation.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5L_TYPE_EXTERNAL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5L_TYPE_SOFT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5O_TYPE_DATASET;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5O_TYPE_GROUP;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5O_TYPE_NAMED_DATATYPE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5O_TYPE_NTYPES;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * The common superclass of {@link HDF5LinkInformation} and {@link HDF5ObjectInformation}.
+ *
+ * @author Bernd Rinn
+ */
+class HDF5CommonInformation
+{
+
+    protected final String path;
+    protected final HDF5ObjectType type;
+
+    static HDF5ObjectType objectTypeIdToObjectType(final int objectTypeId)
+    {
+        if (-1 == objectTypeId)
+        {
+            return HDF5ObjectType.NONEXISTENT;
+        } else if (H5O_TYPE_GROUP == objectTypeId)
+        {
+            return HDF5ObjectType.GROUP;
+        } else if (H5O_TYPE_DATASET == objectTypeId)
+        {
+            return HDF5ObjectType.DATASET;
+        } else if (H5O_TYPE_NAMED_DATATYPE == objectTypeId)
+        {
+            return HDF5ObjectType.DATATYPE;
+        } else if (objectTypeId >= H5O_TYPE_NTYPES)
+        {
+            final int linkTypeId = objectTypeId - H5O_TYPE_NTYPES;
+            if (linkTypeId == H5L_TYPE_SOFT)
+            {
+                return HDF5ObjectType.SOFT_LINK;
+            } else if (linkTypeId == H5L_TYPE_EXTERNAL)
+            {
+                return HDF5ObjectType.EXTERNAL_LINK;
+            }
+        }
+        return HDF5ObjectType.OTHER;
+    }
+
+    HDF5CommonInformation(String path, HDF5ObjectType type)
+    {
+        assert path != null;
+        assert type != null;
+
+        this.path = path;
+        this.type = type;
+    }
+
+    /**
+     * @throws HDF5JavaException If the link does not exist.
+     */
+    public void checkExists() throws HDF5JavaException
+    {
+        if (exists() == false)
+        {
+            throw new HDF5JavaException("Link '" + getPath() + "' does not exist.");
+        }
+    }
+
+    /**
+     * Returns the path of this link in the HDF5 file.
+     */
+    public String getPath()
+    {
+        return path;
+    }
+
+    /**
+     * Returns the parent of the path of this link in the HDF5 file. If this link corresponds to the
+     * root, then this method will return the root ("/") itself.
+     */
+    public String getParentPath()
+    {
+        final int lastSlashIndex = path.lastIndexOf('/');
+        if (lastSlashIndex <= 0)
+        {
+            return "/";
+        } else
+        {
+            return path.substring(0, lastSlashIndex);
+        }
+    }
+
+    /**
+     * Returns the name of this link in the HDF5 file (the path without the parent).
+     */
+    public String getName()
+    {
+        return path.substring(path.lastIndexOf('/') + 1);
+    }
+
+    /**
+     * Returns the type of this link.
+     */
+    public HDF5ObjectType getType()
+    {
+        return type;
+    }
+
+    /**
+     * Returns <code>true</code> if the link exists.
+     */
+    public boolean exists()
+    {
+        return HDF5ObjectType.exists(type);
+    }
+
+    /**
+     * Returns <code>true</code> if the link is a group.
+     */
+    public boolean isGroup()
+    {
+        return HDF5ObjectType.isGroup(type);
+    }
+
+    /**
+     * Returns <code>true</code> if the link is a data set.
+     */
+    public boolean isDataSet()
+    {
+        return HDF5ObjectType.isDataSet(type);
+    }
+
+    /**
+     * Returns <code>true</code> if the link is a data type.
+     */
+    public boolean isDataType()
+    {
+        return HDF5ObjectType.isDataType(type);
+    }
+
+}
\ No newline at end of file
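The boundary case in getParentPath() above is easy to miss; the same
last-slash logic, restated standalone:

    // parentPath("/group/sub/ds") -> "/group/sub"
    // parentPath("/ds")           -> "/"  (lastIndexOf('/') == 0)
    // parentPath("/")             -> "/"  (the root is its own parent)
    static String parentPath(String path)
    {
        final int lastSlashIndex = path.lastIndexOf('/');
        return (lastSlashIndex <= 0) ? "/" : path.substring(0, lastSlashIndex);
    }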
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundByteifyerFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundByteifyerFactory.java
new file mode 100644
index 0000000..73ff839
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundByteifyerFactory.java
@@ -0,0 +1,374 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * A factory for {@link HDF5MemberByteifyer}s.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundByteifyerFactory
+{
+
+    private static List<IHDF5CompoundMemberBytifyerFactory> memberFactories =
+            new ArrayList<IHDF5CompoundMemberBytifyerFactory>(14);
+
+    static
+    {
+        memberFactories.add(new HDF5CompoundMemberByteifyerBooleanFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerIntFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerLongFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerShortFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerByteFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerFloatFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerDoubleFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerStringFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerBitSetFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerDateFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerHDF5TimeDurationFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerEnumFactory());
+        memberFactories.add(new HDF5CompoundMemberByteifyerEnumArrayFactory());
+    }
+
+    /**
+     * The way the members of a compound value are accessed: via a reflected
+     * field, by map key, by list index, or by array position.
+     */
+    enum AccessType
+    {
+        FIELD, MAP, LIST, ARRAY
+    }
+
+    /**
+     * The interface for member factories.
+     */
+    interface IHDF5CompoundMemberBytifyerFactory
+    {
+        /**
+         * Returns <code>true</code> if this factory can handle a member of type <code>clazz</code>.
+         */
+        boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull);
+
+        /**
+         * Creates a byteifyer.
+         */
+        HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+                HDF5CompoundMemberMapping member,
+                HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+                HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+                int memOffset, IFileAccessProvider fileInfoProvider);
+
+        /**
+         * Returns a suitable Java type, if this factory has one, or <code>null</code> otherwise.
+         */
+        Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+                HDF5DataTypeVariant typeVariantOrNull);
+    }
+
+    /**
+     * Returns a Java type overriding the one given by {@link HDF5DataClass}, if the factories have
+     * one, or <code>null</code> otherwise.
+     */
+    static Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        for (IHDF5CompoundMemberBytifyerFactory factory : memberFactories)
+        {
+            final Class<?> javaClassOrNull =
+                    factory.tryGetOverrideJavaType(dataClass, rank, elementSize, typeVariantOrNull);
+            if (javaClassOrNull != null)
+            {
+                return javaClassOrNull;
+            }
+        }
+        return null;
+    }
+
+    static HDF5MemberByteifyer[] createMemberByteifyers(Class<?> clazz,
+            IFileAccessProvider fileInfoProvider, CompoundTypeInformation compoundTypeInfoOrNull,
+            HDF5CompoundMemberMapping[] members)
+    {
+        final HDF5MemberByteifyer[] result = new HDF5MemberByteifyer[members.length];
+        int offsetOnDisk = 0;
+        int offsetInMemory = 0;
+        for (int i = 0; i < result.length; ++i)
+        {
+            final AccessType accessType = getAccessType(clazz);
+            final HDF5CompoundMemberInformation compoundMemberInfoOrNull =
+                    (compoundTypeInfoOrNull == null) ? null : compoundTypeInfoOrNull.getMember(i);
+            final Field fieldOrNull =
+                    (accessType == AccessType.FIELD) ? members[i].tryGetField(clazz,
+                            (compoundMemberInfoOrNull != null)) : null;
+            final Class<?> memberClazzOrNull =
+                    (fieldOrNull != null) ? fieldOrNull.getType() : members[i].tryGetMemberClass();
+            final IHDF5CompoundMemberBytifyerFactory factory =
+                    findFactory(memberClazzOrNull, compoundMemberInfoOrNull,
+                            members[i].getMemberName());
+            final HDF5EnumerationType enumTypeOrNullOrNull =
+                    (compoundTypeInfoOrNull == null) ? null : compoundTypeInfoOrNull.enumTypes[i];
+            if (compoundMemberInfoOrNull != null)
+            {
+                offsetOnDisk = compoundMemberInfoOrNull.getOffsetOnDisk();
+                offsetInMemory = compoundMemberInfoOrNull.getOffsetInMemory();
+            }
+            if (isDummy(accessType, fieldOrNull))
+            {
+                result[i] =
+                        new HDF5DummyMemberByteifyer(factory.createBytifyer(accessType,
+                                fieldOrNull, members[i], compoundMemberInfoOrNull,
+                                enumTypeOrNullOrNull, memberClazzOrNull, i, offsetOnDisk,
+                                offsetInMemory, fileInfoProvider));
+            } else
+            {
+                result[i] =
+                        factory.createBytifyer(accessType, fieldOrNull, members[i],
+                                compoundMemberInfoOrNull, enumTypeOrNullOrNull, memberClazzOrNull,
+                                i, offsetOnDisk, offsetInMemory, fileInfoProvider);
+            }
+            if (compoundMemberInfoOrNull == null)
+            {
+                final int size = result[i].getSize();
+                final int elementSize = result[i].getElementSize();
+                offsetOnDisk += size;
+                offsetInMemory = PaddingUtils.padOffset(offsetInMemory + size, elementSize);
+            }
+        }
+        return result;
+    }
+
+    //
+    // Dummy helpers
+    //
+
+    private static boolean isDummy(AccessType accessType, Field fieldOrNull)
+    {
+        return (accessType == AccessType.FIELD) && (fieldOrNull == null);
+    }
+
+    private static class HDF5DummyMemberByteifyer extends HDF5MemberByteifyer
+    {
+        private final HDF5MemberByteifyer delegate;
+
+        public HDF5DummyMemberByteifyer(HDF5MemberByteifyer delegate)
+        {
+            super(null, null, 0, 0, 0, false, null);
+            this.delegate = delegate;
+        }
+
+        @Override
+        int getElementSize()
+        {
+            return 0;
+        }
+
+        @Override
+        public byte[] byteify(int compoundDataTypeId, Object obj) throws IllegalAccessException
+        {
+            // Dummy implementation: there is no matching field, so emit a
+            // zero-filled placeholder of the member's size.
+            return new byte[delegate.getSize()];
+        }
+
+        @Override
+        public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                int arrayOffset) throws IllegalAccessException
+        {
+            // Dummy implementation: there is no matching field, so the value
+            // read from the byte array is discarded.
+        }
+
+        @Override
+        protected int getMemberStorageTypeId()
+        {
+            return delegate.getMemberStorageTypeId();
+        }
+
+        @Override
+        protected int getMemberNativeTypeId()
+        {
+            return delegate.getMemberNativeTypeId();
+        }
+
+        @Override
+        public HDF5DataTypeVariant getTypeVariant()
+        {
+            return delegate.getTypeVariant();
+        }
+
+        @Override
+        public void insertType(int dataTypeId)
+        {
+            delegate.insertType(dataTypeId);
+        }
+
+        @Override
+        public void insertNativeType(int dataTypeId, HDF5 h5, ICleanUpRegistry registry)
+        {
+            delegate.insertNativeType(dataTypeId, h5, registry);
+        }
+
+        @Override
+        public int getMaxCharacters()
+        {
+            return delegate.getMaxCharacters();
+        }
+
+        @Override
+        public int getSize()
+        {
+            return delegate.getSize();
+        }
+
+        @Override
+        public int getOffsetOnDisk()
+        {
+            return delegate.getOffsetOnDisk();
+        }
+
+        @Override
+        public int getOffsetInMemory()
+        {
+            return delegate.getOffsetInMemory();
+        }
+
+        @Override
+        public int getTotalSizeOnDisk()
+        {
+            return delegate.getTotalSizeOnDisk();
+        }
+
+        @Override
+        public int getTotalSizeInMemory()
+        {
+            return delegate.getTotalSizeInMemory();
+        }
+
+        @Override
+        public String getMemberName()
+        {
+            return delegate.getMemberName();
+        }
+
+        @Override
+        public String describe()
+        {
+            return delegate.describe();
+        }
+
+        @Override
+        public boolean isDummy()
+        {
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return delegate.toString();
+        }
+
+    }
+
+    //
+    // Auxiliary getter and setter methods.
+    //
+
+    private static IHDF5CompoundMemberBytifyerFactory findFactory(Class<?> memberClazz,
+            HDF5CompoundMemberInformation memberInfoOrNull, String memberName)
+    {
+        if (memberClazz == null)
+        {
+            throw new IllegalArgumentException("No type given for member '" + memberName + "'.");
+        }
+        for (IHDF5CompoundMemberBytifyerFactory factory : memberFactories)
+        {
+            if (factory.canHandle(memberClazz, memberInfoOrNull))
+            {
+                return factory;
+            }
+        }
+        if (memberInfoOrNull == null)
+        {
+            throw new IllegalArgumentException("The member '" + memberName + "' is of type '"
+                    + memberClazz.getCanonicalName()
+                    + "' which cannot be handled by any HDFMemberByteifyer.");
+        } else
+        {
+            throw new IllegalArgumentException("The member '" + memberName + "' is of type '"
+                    + memberClazz.getCanonicalName() + "' [memory] and '"
+                    + memberInfoOrNull.getType()
+                    + "' [disk] which cannot be handled by any HDFMemberByteifyer.");
+        }
+    }
+
+    private static AccessType getAccessType(Class<?> clazz)
+    {
+        if (Map.class.isAssignableFrom(clazz))
+        {
+            return AccessType.MAP;
+        } else if (List.class.isAssignableFrom(clazz))
+        {
+            return AccessType.LIST;
+        } else if (Object[].class == clazz)
+        {
+            return AccessType.ARRAY;
+        } else
+        {
+            return AccessType.FIELD;
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    static Object getMap(Object obj, final String name)
+    {
+        return ((Map<String, Object>) obj).get(name);
+    }
+
+    @SuppressWarnings("unchecked")
+    static Object getList(Object obj, final int index)
+    {
+        return ((List<Object>) obj).get(index);
+    }
+
+    static Object getArray(Object obj, final int index)
+    {
+        return ((Object[]) obj)[index];
+    }
+
+    @SuppressWarnings("unchecked")
+    static void putMap(final Object obj, final String memberName, final Object value)
+    {
+        ((Map<String, Object>) obj).put(memberName, value);
+    }
+
+    @SuppressWarnings("unchecked")
+    static void setList(final Object obj, final int index, final Object value)
+    {
+        ((List<Object>) obj).set(index, value);
+    }
+
+    static void setArray(final Object obj, final int index, final Object value)
+    {
+        ((Object[]) obj)[index] = value;
+    }
+
+}
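For reference, the dispatch rule implemented by the private getAccessType()
above, restated standalone (strings stand in for the package-private
AccessType constants):

    import java.util.List;
    import java.util.Map;

    // Map and List compound values are accessed by member name / index,
    // Object[] by position, and any other class via reflection on fields.
    static String accessTypeFor(Class<?> clazz)
    {
        if (Map.class.isAssignableFrom(clazz))
        {
            return "MAP";
        } else if (List.class.isAssignableFrom(clazz))
        {
            return "LIST";
        } else if (Object[].class == clazz)
        {
            return "ARRAY";
        } else
        {
            return "FIELD";
        }
    }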
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundDataList.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundDataList.java
new file mode 100644
index 0000000..49a8673
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundDataList.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * A list to be used to store the members of a compound. 
+ *
+ * @author Bernd Rinn
+ */
+public class HDF5CompoundDataList extends ArrayList<Object>
+{
+    private static final long serialVersionUID = 8683452581122892189L;
+    
+    /**
+     * @see ArrayList#ArrayList()
+     */
+    public HDF5CompoundDataList()
+    {
+        super();
+    }
+
+    /**
+     * @see ArrayList#ArrayList(Collection)
+     */
+    public HDF5CompoundDataList(Collection<? extends Object> c)
+    {
+        super(c);
+    }
+
+    /**
+     * @see ArrayList#ArrayList(int)
+     */
+    public HDF5CompoundDataList(int initialCapacity)
+    {
+        super(initialCapacity);
+    }
+}
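A short sketch of the intended use (member names and values are invented;
the compound read/write calls that consume such a list live elsewhere in
the library): values are stored positionally, in the order of the compound
type's members.

    // A hypothetical compound with two members, filled positionally.
    HDF5CompoundDataList record = new HDF5CompoundDataList(2);
    record.add(21.5f); // member 0, e.g. "temperature"
    record.add(42);    // member 1, e.g. "count"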
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundDataMap.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundDataMap.java
new file mode 100644
index 0000000..b279662
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundDataMap.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A map to be used to store the member data of a compound. 
+ *
+ * @author Bernd Rinn
+ */
+public class HDF5CompoundDataMap extends HashMap<String, Object>
+{
+    private static final long serialVersionUID = 362498820763181265L;
+
+    /**
+     * @see HashMap#HashMap()
+     */
+    public HDF5CompoundDataMap()
+    {
+        super();
+    }
+
+    /**
+     * @see HashMap#HashMap(int, float)
+     */
+    public HDF5CompoundDataMap(int initialCapacity, float loadFactor)
+    {
+        super(initialCapacity, loadFactor);
+    }
+
+    /**
+     * @see HashMap#HashMap(int)
+     */
+    public HDF5CompoundDataMap(int initialCapacity)
+    {
+        super(initialCapacity);
+    }
+
+    /**
+     * @see HashMap#HashMap(Map)
+     */
+    public HDF5CompoundDataMap(Map<? extends String, ? extends Object> m)
+    {
+        super(m);
+    }
+}
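The map-based counterpart (again with invented member names): values are
keyed by member name rather than by position.

    // The same hypothetical compound, filled by member name.
    HDF5CompoundDataMap record = new HDF5CompoundDataMap();
    record.put("temperature", 21.5f);
    record.put("count", 42);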
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundInformationRetriever.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundInformationRetriever.java
new file mode 100644
index 0000000..17374a7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundInformationRetriever.java
@@ -0,0 +1,838 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_COMPOUND;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5CompoundInformationRetriever}.
+ * 
+ * @author Bernd Rinn
+ */
+abstract class HDF5CompoundInformationRetriever implements IHDF5CompoundInformationRetriever
+{
+
+    protected final HDF5BaseReader baseReader;
+
+    protected final IHDF5EnumTypeRetriever enumTypeRetriever;
+
+    HDF5CompoundInformationRetriever(HDF5BaseReader baseReader,
+            IHDF5EnumTypeRetriever enumTypeRetriever)
+    {
+        assert baseReader != null;
+        assert enumTypeRetriever != null;
+
+        this.baseReader = baseReader;
+        this.enumTypeRetriever = enumTypeRetriever;
+    }
+
+    @Override
+    public <T> HDF5CompoundMemberInformation[] getMemberInfo(final Class<T> compoundClass)
+    {
+        return getMemberInfo(compoundClass.getSimpleName());
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getMemberInfo(final String dataTypeName)
+    {
+        return getMemberInfo(dataTypeName, DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getMemberInfo(final String dataTypeName,
+            final DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5CompoundMemberInformation[]> writeRunnable =
+                new ICallableWithCleanUp<HDF5CompoundMemberInformation[]>()
+                    {
+                        @Override
+                        public HDF5CompoundMemberInformation[] call(final ICleanUpRegistry registry)
+                        {
+                            final String dataTypePath =
+                                    HDF5Utils.createDataTypePath(HDF5Utils.COMPOUND_PREFIX,
+                                            baseReader.houseKeepingNameSuffix, dataTypeName);
+                            final int compoundDataTypeId =
+                                    baseReader.h5.openDataType(baseReader.fileId, dataTypePath,
+                                            registry);
+                            final CompoundTypeInformation compoundInformation =
+                                    getCompoundTypeInformation(compoundDataTypeId, dataTypePath,
+                                            dataTypeInfoOptions, registry);
+                            return compoundInformation.getCopyOfMembers();
+                        }
+                    };
+        return baseReader.runner.call(writeRunnable);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getDataSetInfo(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        return getDataSetInfo(dataSetPath, DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getDataSetInfo(final String dataSetPath,
+            final DataTypeInfoOptions dataTypeInfoOptions) throws HDF5JavaException
+    {
+        final ICallableWithCleanUp<HDF5CompoundMemberInformation[]> infoRunnable =
+                new ICallableWithCleanUp<HDF5CompoundMemberInformation[]>()
+                    {
+                        @Override
+                        public HDF5CompoundMemberInformation[] call(final ICleanUpRegistry registry)
+                        {
+                            return getFullCompoundDataSetInformation(dataSetPath,
+                                    dataTypeInfoOptions, registry).getCopyOfMembers();
+                        }
+                    };
+        final HDF5CompoundMemberInformation[] compoundInformation =
+                baseReader.runner.call(infoRunnable);
+        return compoundInformation;
+    }
+
+    private CompoundTypeInformation getFullCompoundAttributeInformation(final String objectPath,
+            final String attributeName, final DataTypeInfoOptions dataTypeInfoOptions,
+            final ICleanUpRegistry registry) throws HDF5JavaException
+    {
+        final int dataSetId = baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+        final int attributeId = baseReader.h5.openAttribute(dataSetId, attributeName, registry);
+        final int storageDataTypeId = baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int compoundDataTypeId;
+        int classType = baseReader.h5.getClassType(storageDataTypeId);
+        if (classType == H5T_ARRAY)
+        {
+            compoundDataTypeId = baseReader.h5.getBaseDataType(storageDataTypeId, registry);
+            classType = baseReader.h5.getClassType(compoundDataTypeId);
+        } else
+        {
+            compoundDataTypeId = storageDataTypeId;
+        }
+        if (classType != H5T_COMPOUND)
+        {
+            throw new HDF5JavaException("Attribute '" + attributeName + "' of object '"
+                    + objectPath + "' is not of compound type.");
+        }
+        final String dataTypePathOrNull = baseReader.tryGetDataTypePath(compoundDataTypeId);
+        final CompoundTypeInformation compoundInformation =
+                getCompoundTypeInformation(compoundDataTypeId, dataTypePathOrNull,
+                        dataTypeInfoOptions, registry);
+        return compoundInformation;
+    }
+
+    private CompoundTypeInformation getFullCompoundDataSetInformation(final String dataSetPath,
+            final DataTypeInfoOptions dataTypeInfoOptions, final ICleanUpRegistry registry)
+            throws HDF5JavaException
+    {
+        final int dataSetId = baseReader.h5.openDataSet(baseReader.fileId, dataSetPath, registry);
+        final int compoundDataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+        if (baseReader.h5.getClassType(compoundDataTypeId) != H5T_COMPOUND)
+        {
+            throw new HDF5JavaException("Data set '" + dataSetPath + "' is not of compound type.");
+        }
+        // Note: the type variants for the compound members are stored with the compound type.
+        // So to know the type variants of the data set's members, we need to read the data type
+        // path as well.
+        final String dataTypePathOrNull =
+                (dataTypeInfoOptions.knowsDataTypePath() || dataTypeInfoOptions
+                        .knowsDataTypeVariant()) ? baseReader
+                        .tryGetDataTypePath(compoundDataTypeId) : null;
+        final CompoundTypeInformation compoundInformation =
+                getCompoundTypeInformation(compoundDataTypeId, dataTypePathOrNull,
+                        dataTypeInfoOptions, registry);
+        return compoundInformation;
+    }
+
+    private CompoundTypeInformation getFullCompoundDataTypeInformation(final String dataTypePath,
+            final DataTypeInfoOptions dataTypeInfoOptions, final ICleanUpRegistry registry)
+            throws HDF5JavaException
+    {
+        final int compoundDataTypeId =
+                baseReader.h5.openDataType(baseReader.fileId, dataTypePath, registry);
+        if (baseReader.h5.getClassType(compoundDataTypeId) != H5T_COMPOUND)
+        {
+            throw new HDF5JavaException("Data type '" + dataTypePath + "' is not a compound type.");
+        }
+        final CompoundTypeInformation compoundInformation =
+                getCompoundTypeInformation(compoundDataTypeId, dataTypePath, dataTypeInfoOptions,
+                        registry);
+        return compoundInformation;
+    }
+
+    CompoundTypeInformation getCompoundTypeInformation(final int compoundDataTypeId,
+            final String dataTypePathOrNull, final DataTypeInfoOptions dataTypeInfoOptions,
+            final ICleanUpRegistry registry)
+    {
+        final String typeName =
+                HDF5Utils.getDataTypeNameFromPath(dataTypePathOrNull,
+                        baseReader.houseKeepingNameSuffix, HDF5DataClass.COMPOUND);
+        final String[] memberNames =
+                baseReader.h5.getNamesForEnumOrCompoundMembers(compoundDataTypeId);
+        final int nativeCompoundDataTypeId =
+                baseReader.h5.getNativeDataType(compoundDataTypeId, registry);
+        final int recordSizeOnDisk = baseReader.h5.getDataTypeSize(compoundDataTypeId);
+        final int recordSizeInMemory = baseReader.h5.getDataTypeSize(nativeCompoundDataTypeId);
+        final CompoundTypeInformation compoundInfo =
+                new CompoundTypeInformation(typeName, compoundDataTypeId, nativeCompoundDataTypeId,
+                        memberNames.length, recordSizeOnDisk, recordSizeInMemory);
+        final HDF5DataTypeVariant[] memberTypeVariantsOrNull =
+                dataTypeInfoOptions.knowsDataTypeVariant() ? baseReader
+                        .tryGetTypeVariantForCompoundMembers(dataTypePathOrNull, registry) : null;
+        if (memberTypeVariantsOrNull != null
+                && memberTypeVariantsOrNull.length != memberNames.length)
+        {
+            throw new HDF5JavaException(
+                    "Invalid member data type variant information on committed data type '"
+                            + dataTypePathOrNull + "'.");
+        }
+        int offsetOnDisk = 0;
+        int offsetInMemory = 0;
+        for (int i = 0; i < memberNames.length; ++i)
+        {
+            final int dataTypeId =
+                    baseReader.h5.getDataTypeForIndex(compoundDataTypeId, i, registry);
+            // This should save us from computing the offsets ourselves but, as it turns out, the
+            // offset for the native data type is wrong for bit fields.
+            // Failing test: HDF5RoundtripTest.testCompoundMap()
+            // Tested: 2014-07-28, HDF5 1.8.13
+            // offsetOnDisk = baseReader.h5.getOffsetForCompoundMemberIndex(compoundDataTypeId, i);
+            // offsetInMemory =
+            // baseReader.h5.getOffsetForCompoundMemberIndex(nativeCompoundDataTypeId, i);
+            compoundInfo.dataTypeIds[i] = dataTypeId;
+            final HDF5DataTypeInformation dataTypeInformation =
+                    baseReader.getDataTypeInformation(dataTypeId, dataTypeInfoOptions, registry);
+            if (memberTypeVariantsOrNull != null && memberTypeVariantsOrNull[i].isTypeVariant())
+            {
+                dataTypeInformation.setTypeVariant(memberTypeVariantsOrNull[i]);
+            }
+            final HDF5EnumerationType enumTypeOrNull;
+            if (dataTypeInformation.getDataClass() == HDF5DataClass.ENUM)
+            {
+                if (dataTypeInformation.isArrayType())
+                {
+                    final int baseDataSetType = baseReader.h5.getBaseDataType(dataTypeId, registry);
+                    enumTypeOrNull =
+                            baseReader.getEnumTypeForStorageDataType(null, baseDataSetType, false,
+                                    null, null, registry);
+                } else
+                {
+                    enumTypeOrNull =
+                            baseReader.getEnumTypeForStorageDataType(null, dataTypeId, false, null,
+                                    null, registry);
+                }
+            } else
+            {
+                enumTypeOrNull = null;
+            }
+            compoundInfo.enumTypes[i] = enumTypeOrNull;
+            if (enumTypeOrNull != null)
+            {
+                compoundInfo.setMember(i, new HDF5CompoundMemberInformation(memberNames[i],
+                        dataTypeInformation, offsetOnDisk, offsetInMemory, enumTypeOrNull
+                                .getEnumType().getValueArray()));
+            } else
+            {
+                compoundInfo.setMember(i, new HDF5CompoundMemberInformation(memberNames[i],
+                        dataTypeInformation, offsetOnDisk, offsetInMemory));
+            }
+            final HDF5DataTypeInformation typeInfo = compoundInfo.getMember(i).getType();
+            final int size = typeInfo.getSize();
+            offsetOnDisk += size;
+            offsetInMemory =
+                    PaddingUtils.padOffset(offsetInMemory + size,
+                            typeInfo.getElementSizeForPadding());
+
+        }
+        return compoundInfo;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getType(final String name, final Class<T> pojoClass,
+            boolean requireTypesToBeEqual, final HDF5CompoundMemberMapping... members)
+    {
+        baseReader.checkOpen();
+        final HDF5ValueObjectByteifyer<T> objectArrayifyer =
+                baseReader.createCompoundByteifyers(pojoClass, members, null);
+        return getType(name, -1, pojoClass, requireTypesToBeEqual, objectArrayifyer);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getType(final String name, final Class<T> pojoClass,
+            final HDF5CompoundMemberMapping... members)
+    {
+        return getType(name, pojoClass, true, members);
+    }
+
+    <T> HDF5CompoundType<T> getType(final String name, int committedDataTypeId,
+            final Class<T> compoundType, final boolean requireEqualsType,
+            final HDF5ValueObjectByteifyer<T> objectArrayifyer)
+    {
+        final int storageDataTypeId =
+                (committedDataTypeId < 0) ? baseReader
+                        .createStorageCompoundDataType(objectArrayifyer) : committedDataTypeId;
+        final int nativeDataTypeId = baseReader.createNativeCompoundDataType(objectArrayifyer);
+        return new HDF5CompoundType<T>(baseReader.fileId, storageDataTypeId, nativeDataTypeId,
+                name, compoundType, requireEqualsType, objectArrayifyer,
+                new HDF5CompoundType.IHDF5InternalCompoundMemberInformationRetriever()
+                    {
+                        @Override
+                        public HDF5CompoundMemberInformation[] getCompoundMemberInformation(
+                                final DataTypeInfoOptions dataTypeInfoOptions)
+                        {
+                            return HDF5CompoundInformationRetriever.this
+                                    .getCompoundMemberInformation(storageDataTypeId, name,
+                                            dataTypeInfoOptions);
+                        }
+                    }, baseReader);
+    }
+
+    HDF5CompoundMemberInformation[] getCompoundMemberInformation(final int storageDataTypeId,
+            final String dataTypeNameOrNull, final DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5CompoundMemberInformation[]> writeRunnable =
+                new ICallableWithCleanUp<HDF5CompoundMemberInformation[]>()
+                    {
+                        @Override
+                        public HDF5CompoundMemberInformation[] call(final ICleanUpRegistry registry)
+                        {
+                            final String dataTypePath =
+                                    (dataTypeNameOrNull == null) ? null : HDF5Utils
+                                            .createDataTypePath(HDF5Utils.COMPOUND_PREFIX,
+                                                    baseReader.houseKeepingNameSuffix,
+                                                    dataTypeNameOrNull);
+                            final CompoundTypeInformation compoundInformation =
+                                    getCompoundTypeInformation(storageDataTypeId, dataTypePath,
+                                            dataTypeInfoOptions, registry);
+                            return compoundInformation.getCopyOfMembers();
+                        }
+                    };
+        return baseReader.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getType(final Class<T> pojoClass,
+            final HDF5CompoundMemberMapping... members)
+    {
+        return getType(null, pojoClass, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(String name, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(name, pojoClass, hints, true);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(String name, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual)
+    {
+        return getType(
+                name,
+                pojoClass,
+                requireTypesToBeEqual,
+                addEnumTypes(HDF5CompoundMemberMapping.addHints(
+                        HDF5CompoundMemberMapping.inferMapping(pojoClass), hints)));
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(Class<T> pojoClass,
+            HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(null, pojoClass, hints);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(final String name, final Class<T> pojoClass)
+    {
+        return getInferredType(name, pojoClass, null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(final Class<T> pojoClass)
+    {
+        return getInferredType(null, pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(String name, T pojo,
+            HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(name, pojo, hints, true);
+    }
+
+    @Override
+    @SuppressWarnings(
+        { "unchecked", "rawtypes" })
+    public <T> HDF5CompoundType<T> getInferredType(String name, T pojo,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual)
+    {
+        if (Map.class.isInstance(pojo))
+        {
+            final String compoundTypeName =
+                    (name == null) ? HDF5CompoundMemberMapping.constructCompoundTypeName(
+                            ((Map) pojo).keySet(), true) : name;
+            return (HDF5CompoundType<T>) getType(
+                    compoundTypeName,
+                    Map.class,
+                    requireTypesToBeEqual,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(
+                            HDF5CompoundMemberMapping.inferMapping((Map) pojo), hints)));
+        } else
+        {
+            final Class<T> pojoClass = (Class<T>) pojo.getClass();
+            return getType(name, pojoClass, requireTypesToBeEqual,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(HDF5CompoundMemberMapping
+                            .inferMapping(pojo, HDF5CompoundMemberMapping.inferEnumerationTypeMap(
+                                    pojo, enumTypeRetriever), HDF5CompoundMappingHints
+                                    .isUseVariableLengthStrings(hints)), hints)));
+        }
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(final String name, final T[] pojo)
+    {
+        return getInferredType(name, pojo, null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(final T[] pojo)
+    {
+        return getInferredType((String) null, pojo, null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(String name, T[] pojo,
+            HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(name, pojo, hints, true);
+    }
+    
+    @Override
+    @SuppressWarnings(
+        { "unchecked", "rawtypes" })
+    public <T> HDF5CompoundType<T> getInferredType(String name, T[] pojo,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual)
+    {
+        final Class<?> componentType = pojo.getClass().getComponentType();
+        if (pojo.length == 0)
+        {
+            return (HDF5CompoundType<T>) getInferredType(name, componentType, hints);
+        }
+        if (Map.class.isAssignableFrom(componentType))
+        {
+            final String compoundTypeName =
+                    (name == null) ? HDF5CompoundMemberMapping.constructCompoundTypeName(
+                            ((Map) pojo[0]).keySet(), true) : name;
+            return (HDF5CompoundType<T>) getType(
+                    compoundTypeName,
+                    Map.class, 
+                    requireTypesToBeEqual,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(
+                            HDF5CompoundMemberMapping.inferMapping((Map) pojo[0]), hints)));
+        } else
+        {
+            return (HDF5CompoundType<T>) getType(name, componentType, requireTypesToBeEqual,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(HDF5CompoundMemberMapping
+                            .inferMapping(pojo, HDF5CompoundMemberMapping.inferEnumerationTypeMap(
+                                    pojo, enumTypeRetriever),
+                                    hints == null ? false : hints.isUseVariableLengthStrings()),
+                            hints)));
+        }
+    }
+
+    HDF5CompoundMemberMapping[] addEnumTypes(HDF5CompoundMemberMapping[] mapping)
+    {
+        for (HDF5CompoundMemberMapping m : mapping)
+        {
+            final Class<?> memberClass = m.tryGetMemberClass();
+            if (memberClass != null)
+            {
+                if (memberClass.isEnum())
+                {
+                    @SuppressWarnings("unchecked")
+                    final Class<? extends Enum<?>> enumClass =
+                            (Class<? extends Enum<?>>) memberClass;
+                    final String typeName =
+                            (StringUtils.isBlank(m.tryGetEnumTypeName())) ? memberClass
+                                    .getSimpleName() : m.tryGetEnumTypeName();
+                    m.setEnumerationType(enumTypeRetriever.getType(typeName,
+                            ReflectionUtils.getEnumOptions(enumClass)));
+                } else if (memberClass == HDF5EnumerationValue.class
+                        || memberClass == HDF5EnumerationValueArray.class
+                        || memberClass == HDF5EnumerationValueMDArray.class)
+                {
+                    final HDF5CompoundMappingHints hintsOrNull = m.tryGetHints();
+                    final HDF5EnumerationType typeOrNull =
+                            (hintsOrNull != null) ? hintsOrNull.tryGetEnumType(m.getMemberName())
+                                    : null;
+                    if (typeOrNull != null)
+                    {
+                        m.setEnumerationType(typeOrNull);
+                    }
+                }
+            }
+        }
+        return mapping;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(final String name, final T pojo)
+    {
+        return getInferredType(name, pojo, null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredType(final T pojo)
+    {
+        return getInferredType(null, pojo);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredType(String name, List<String> memberNames,
+            List<?> data, HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(name, memberNames, data, hints, true);
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public HDF5CompoundType<List<?>> getInferredType(String name, List<String> memberNames,
+            List<?> data, HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual)
+    {
+        final String compoundTypeName =
+                (name == null) ? HDF5CompoundMemberMapping.constructCompoundTypeName(memberNames,
+                        false) : name;
+        final HDF5CompoundType<?> type =
+                getType(compoundTypeName,
+                        List.class,
+                        requireTypesToBeEqual,
+                        HDF5CompoundMemberMapping.addHints(
+                                HDF5CompoundMemberMapping.inferMapping(memberNames, data), hints));
+        return (HDF5CompoundType<List<?>>) type;
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredType(String name, List<String> memberNames,
+            List<?> data)
+    {
+        return getInferredType(name, memberNames, data, null);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredType(List<String> memberNames, List<?> data)
+    {
+        return getInferredType(null, memberNames, data);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredType(List<String> memberNames, List<?> data,
+            HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(null, memberNames, data, hints);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredType(String[] memberNames, Object[] data)
+    {
+        return getInferredType(null, memberNames, data);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredType(String name, String[] memberNames,
+            Object[] data)
+    {
+        final String compoundTypeName =
+                (name == null) ? HDF5CompoundMemberMapping.constructCompoundTypeName(
+                        Arrays.asList(memberNames), false) : name;
+        return getType(compoundTypeName, Object[].class,
+                HDF5CompoundMemberMapping.inferMapping(memberNames, data));
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredType(String name, String[] memberNames,
+            Object[] data, HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(name, memberNames, data, hints, true);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredType(String name, String[] memberNames,
+            Object[] data, HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual)
+    {
+        final String compoundTypeName =
+                (name == null) ? HDF5CompoundMemberMapping.constructCompoundTypeName(
+                        Arrays.asList(memberNames), false) : name;
+        return getType(compoundTypeName, Object[].class, requireTypesToBeEqual,
+                HDF5CompoundMemberMapping.inferMapping(memberNames, data, hints));
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredType(String[] memberNames, Object[] data,
+            HDF5CompoundMappingHints hints)
+    {
+        return getInferredType(null, memberNames, data, hints);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members)
+    {
+        return getDataSetType(objectPath, pojoClass, true, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            boolean requireTypesToBeEqual, HDF5CompoundMemberMapping... members)
+    {
+        baseReader.checkOpen();
+        final CompoundTypeInformation cpdTypeInfo =
+                getFullCompoundDataSetInformation(objectPath, DataTypeInfoOptions.MINIMAL,
+                        baseReader.fileRegistry);
+        final HDF5CompoundType<T> typeForClass =
+                getType(cpdTypeInfo.name, cpdTypeInfo.compoundDataTypeId, pojoClass,
+                        requireTypesToBeEqual, createByteifyers(pojoClass, cpdTypeInfo, members));
+        return typeForClass;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints)
+    {
+        return getDataSetType(objectPath, pojoClass, hints, true);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual)
+    {
+        baseReader.checkOpen();
+        // We need to get ALL information for the type as otherwise the mapping might be wrong (due
+        // to a missing data type variant).
+        final CompoundTypeInformation cpdTypeInfo =
+                getFullCompoundDataSetInformation(objectPath, DataTypeInfoOptions.ALL,
+                        baseReader.fileRegistry);
+        final HDF5CompoundType<T> typeForClass =
+                getType(cpdTypeInfo.name, cpdTypeInfo.compoundDataTypeId, pojoClass,
+                        requireTypesToBeEqual, createByteifyers(pojoClass, cpdTypeInfo, hints));
+        return typeForClass;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass)
+    {
+        return getDataSetType(objectPath, pojoClass, (HDF5CompoundMappingHints) null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass)
+    {
+        return getAttributeType(objectPath, attributeName, pojoClass, null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass, HDF5CompoundMappingHints hints)
+    {
+        return getAttributeType(objectPath, attributeName, pojoClass, hints,
+                DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass, HDF5CompoundMappingHints hints,
+            DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return getAttributeType(objectPath, attributeName, pojoClass, hints, dataTypeInfoOptions,
+                true);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass, HDF5CompoundMappingHints hints,
+            DataTypeInfoOptions dataTypeInfoOptions, boolean requireTypesToBeEqual)
+    {
+        final CompoundTypeInformation cpdTypeInfo =
+                getFullCompoundAttributeInformation(objectPath, attributeName, dataTypeInfoOptions,
+                        baseReader.fileRegistry);
+        final HDF5CompoundType<T> typeForClass =
+                getType(cpdTypeInfo.name, cpdTypeInfo.compoundDataTypeId, pojoClass,
+                        requireTypesToBeEqual, createByteifyers(pojoClass, cpdTypeInfo, hints));
+        return typeForClass;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedType(Class<T> pojoClass)
+    {
+        return getNamedType(pojoClass.getSimpleName(), pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass)
+    {
+        return getNamedType(dataTypeName, pojoClass, null, DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints)
+    {
+        return getNamedType(dataTypeName, pojoClass, hints, DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return getNamedType(dataTypeName, pojoClass, null, dataTypeInfoOptions);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return getNamedType(dataTypeName, pojoClass, hints, dataTypeInfoOptions, true);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, DataTypeInfoOptions dataTypeInfoOptions,
+            boolean requireTypesToBeEqual)
+    {
+        final String dataTypePath =
+                HDF5Utils.createDataTypePath(HDF5Utils.COMPOUND_PREFIX,
+                        baseReader.houseKeepingNameSuffix, dataTypeName);
+        final CompoundTypeInformation cpdTypeInfo =
+                getFullCompoundDataTypeInformation(dataTypePath, dataTypeInfoOptions,
+                        baseReader.fileRegistry);
+        final HDF5CompoundType<T> typeForClass =
+                getType(dataTypeName, cpdTypeInfo.compoundDataTypeId, pojoClass,
+                        requireTypesToBeEqual, createByteifyers(pojoClass, cpdTypeInfo, hints));
+        return typeForClass;
+    }
+
+    private <T> HDF5ValueObjectByteifyer<T> createByteifyers(final Class<T> compoundClazz,
+            final CompoundTypeInformation compoundTypeInfo,
+            final HDF5CompoundMemberMapping[] mapping)
+    {
+        return baseReader.createCompoundByteifyers(compoundClazz, mapping, compoundTypeInfo);
+    }
+
+    private <T> HDF5ValueObjectByteifyer<T> createByteifyers(final Class<T> compoundClazz,
+            final CompoundTypeInformation compoundTypeInfo,
+            final HDF5CompoundMappingHints hintsOrNull)
+    {
+        return baseReader.createCompoundByteifyers(compoundClazz,
+                inferMemberMapping(compoundClazz, compoundTypeInfo, hintsOrNull), compoundTypeInfo);
+    }
+
+    private HDF5CompoundMemberMapping[] inferMemberMapping(final Class<?> compoundClazz,
+            final CompoundTypeInformation compoundTypeInfo,
+            final HDF5CompoundMappingHints hintsOrNull)
+    {
+        final List<HDF5CompoundMemberMapping> mapping =
+                new ArrayList<HDF5CompoundMemberMapping>(compoundTypeInfo.getNumberOfMembers());
+        final Map<String, Field> fields = ReflectionUtils.getFieldMap(compoundClazz);
+        for (int i = 0; i < compoundTypeInfo.getNumberOfMembers(); ++i)
+        {
+            final HDF5CompoundMemberInformation compoundMember = compoundTypeInfo.getMember(i);
+            final int compoundMemberTypeId = compoundTypeInfo.dataTypeIds[i];
+            final Field fieldOrNull = fields.get(compoundMember.getName());
+            final String memberName = compoundMember.getName();
+            final String fieldName = (fieldOrNull != null) ? fieldOrNull.getName() : memberName;
+            final HDF5DataTypeInformation typeInfo = compoundMember.getType();
+            final int[] dimensions = typeInfo.getDimensions();
+            if (typeInfo.getDataClass() == HDF5DataClass.ENUM)
+            {
+                if (dimensions.length == 0 || (dimensions.length == 1 && dimensions[0] == 1))
+                {
+                    mapping.add(HDF5CompoundMemberMapping.mapping(memberName).fieldName(fieldName)
+                            .enumType(compoundTypeInfo.enumTypes[i])
+                            .typeVariant(typeInfo.tryGetTypeVariant()));
+                } else if (dimensions.length == 1)
+                {
+                    mapping.add(HDF5CompoundMemberMapping.mappingWithStorageTypeId(
+                            fieldName,
+                            memberName,
+                            new HDF5EnumerationType(baseReader.fileId, -1, baseReader.h5
+                                    .getNativeDataType(compoundMemberTypeId,
+                                            baseReader.fileRegistry), baseReader
+                                    .getEnumDataTypeName(compoundMember.getType().tryGetName(),
+                                            compoundMemberTypeId), compoundMember
+                                    .tryGetEnumValues(), baseReader), dimensions,
+                            compoundMemberTypeId, typeInfo.tryGetTypeVariant()));
+                }
+            } else if (typeInfo.getDataClass() == HDF5DataClass.STRING)
+            {
+                if (fieldOrNull != null && (fieldOrNull.getType() != String.class)
+                        && (fieldOrNull.getType() != char[].class))
+                {
+                    throw new HDF5JavaException(
+                            "Field of string type does not correspond to string or char[] value");
+                }
+                mapping.add(HDF5CompoundMemberMapping.mappingArrayWithStorageId(fieldName,
+                        memberName, String.class, new int[]
+                            { typeInfo.getElementSize() }, compoundMemberTypeId, false,
+                        typeInfo.isVariableLengthString(), false, typeInfo.tryGetTypeVariant()));
+            } else if (typeInfo.getDataClass() == HDF5DataClass.REFERENCE)
+            {
+                if (fieldOrNull != null && (fieldOrNull.getType() != String.class)
+                        && (fieldOrNull.getType() != char[].class))
+                {
+                    throw new HDF5JavaException(
+                            "Field of rererence type does not correspond to string or char[] value");
+                }
+                mapping.add(HDF5CompoundMemberMapping.mappingArrayWithStorageId(fieldName,
+                        memberName, String.class, new int[]
+                            { typeInfo.getElementSize() }, compoundMemberTypeId, false,
+                        false, true, typeInfo.tryGetTypeVariant()));
+            } else
+            {
+                final Class<?> memberClazz;
+                if (fieldOrNull != null)
+                {
+                    memberClazz = fieldOrNull.getType();
+                } else
+                {
+                    memberClazz = typeInfo.tryGetJavaType();
+                }
+                mapping.add(HDF5CompoundMemberMapping.mappingArrayWithStorageId(fieldName,
+                        memberName, memberClazz, dimensions, compoundMemberTypeId,
+                        false == compoundMember.getType().isSigned(), false, false, 
+                        typeInfo.tryGetTypeVariant()));
+            }
+        }
+        return HDF5CompoundMemberMapping.addHints(
+                mapping.toArray(new HDF5CompoundMemberMapping[mapping.size()]), hintsOrNull);
+    }
+
+}
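
Side note on the offset bookkeeping in getCompoundTypeInformation above: members are packed contiguously on disk (offsetOnDisk += size), while the in-memory offset is rounded up to the member's element alignment via PaddingUtils.padOffset. A self-contained sketch of that rounding rule; the padOffset body below is a plausible reimplementation for illustration, not the library source, and the class name is made up:

    public class PadOffsetDemo
    {
        // Round offset up to the next multiple of elementSize (the contract
        // assumed for PaddingUtils.padOffset above); elementSize <= 1 means
        // no padding is needed.
        static int padOffset(int offset, int elementSize)
        {
            if (elementSize <= 1)
            {
                return offset;
            }
            final int mod = offset % elementSize;
            return (mod == 0) ? offset : offset + elementSize - mod;
        }

        public static void main(String[] args)
        {
            System.out.println(padOffset(9, 8));  // 16: rounded up to a multiple of 8
            System.out.println(padOffset(16, 8)); // 16: already aligned
            System.out.println(padOffset(5, 1));  // 5:  element size 1 never pads
        }
    }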
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMappingHints.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMappingHints.java
new file mode 100644
index 0000000..1ff7c2b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMappingHints.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A class to store general hints that can influence the compound member mapping.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5CompoundMappingHints
+{
+    public enum EnumReturnType
+    {
+        ORDINAL, STRING, JAVAENUMERATION, HDF5ENUMERATIONVALUE
+    }
+
+    private EnumReturnType enumReturnType = EnumReturnType.HDF5ENUMERATIONVALUE;
+
+    private boolean useVariableLengthStrings = false;
+
+    private Map<String, HDF5EnumerationType> enumerationTypeMap;
+
+    /**
+     * Returns the desired return type for enums.
+     */
+    public EnumReturnType getEnumReturnType()
+    {
+        return enumReturnType;
+    }
+
+    /**
+     * Sets the desired return type for enums.
+     */
+    public void setEnumReturnType(EnumReturnType enumReturnType)
+    {
+        this.enumReturnType = enumReturnType;
+    }
+
+    /**
+     * Sets the desired return type for enums.
+     * 
+     * @return This object (for chaining)
+     */
+    public HDF5CompoundMappingHints enumReturnType(@SuppressWarnings("hiding")
+    EnumReturnType enumReturnType)
+    {
+        this.enumReturnType = enumReturnType;
+        return this;
+    }
+
+    /**
+     * Adds an enum type mapping to this hints object.
+     * 
+     * @return The hint object.
+     */
+    public HDF5CompoundMappingHints enumTypeMapping(String memberName, HDF5EnumerationType enumType)
+    {
+        if (enumerationTypeMap == null)
+        {
+            enumerationTypeMap = new HashMap<String, HDF5EnumerationType>();
+        }
+        enumerationTypeMap.put(memberName, enumType);
+        return this;
+    }
+
+    /**
+     * Replaces the enum type mapping of this hints object.
+     * 
+     * @return The hint object.
+     */
+    public HDF5CompoundMappingHints enumTypeMapping(Map<String, HDF5EnumerationType> enumTypeMapping)
+    {
+        enumerationTypeMap = enumTypeMapping;
+        return this;
+    }
+
+    /**
+     * Returns the {@link HDF5EnumerationType} for the given <var>memberName</var>, or
+     * <code>null</code>, if no mapping is available for this member.
+     */
+    public HDF5EnumerationType tryGetEnumType(String memberName)
+    {
+        if (enumerationTypeMap == null)
+        {
+            return null;
+        }
+        return enumerationTypeMap.get(memberName);
+    }
+
+    /**
+     * Returns the desired enumeration return type.
+     */
+    public static EnumReturnType getEnumReturnType(HDF5CompoundMemberMapping mapping)
+    {
+        return (mapping.tryGetHints() == null) ? EnumReturnType.HDF5ENUMERATIONVALUE : mapping
+                .tryGetHints().getEnumReturnType();
+    }
+
+    /**
+     * Returns whether variable-length-string types should be used if the length is not set
+     * explicitly.
+     */
+    public static boolean isUseVariableLengthStrings(HDF5CompoundMappingHints hintsOrNull)
+    {
+        return hintsOrNull == null ? false : hintsOrNull.useVariableLengthStrings;
+    }
+
+    /**
+     * Returns whether variable-length-string types should be used if the length is not set
+     * explicitly.
+     */
+    public boolean isUseVariableLengthStrings()
+    {
+        return useVariableLengthStrings;
+    }
+
+    /**
+     * Sets whether variable-length-string types should be used if the length is not set explicitly.
+     */
+    public void setUseVariableLengthStrings(boolean useVariableLengthStrings)
+    {
+        this.useVariableLengthStrings = useVariableLengthStrings;
+    }
+
+    /**
+     * Sets that variable-length-string types should be used if the length is not set explicitly.
+     * 
+     * @return The hint object.
+     */
+    public HDF5CompoundMappingHints useVariableLengthStrings()
+    {
+        this.useVariableLengthStrings = true;
+        return this;
+    }
+
+    /**
+     * Sets whether variable-length-string types should be used if the length is not set explicitly.
+     * 
+     * @return The hint object.
+     */
+    public HDF5CompoundMappingHints useVariableLengthStrings(@SuppressWarnings("hiding")
+    boolean useVariableLengthStrings)
+    {
+        this.useVariableLengthStrings = useVariableLengthStrings;
+        return this;
+    }
+}
\ No newline at end of file
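
Usage note (not part of the upstream diff): the setters above return this, so hints can be chained and then handed to one of the getInferredType overloads from HDF5CompoundInformationRetriever. A minimal sketch; the demo class name is hypothetical:

    import ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints;
    import ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints.EnumReturnType;

    public class HintsDemo
    {
        public static void main(String[] args)
        {
            // Ask for string-valued enums and variable-length strings; both
            // setters return the hints object, so the calls chain.
            HDF5CompoundMappingHints hints = new HDF5CompoundMappingHints()
                    .enumReturnType(EnumReturnType.STRING)
                    .useVariableLengthStrings();
            System.out.println(hints.getEnumReturnType());          // STRING
            System.out.println(hints.isUseVariableLengthStrings()); // true
        }
    }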
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerBitSetFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerBitSetFactory.java
new file mode 100644
index 0000000..de1a680
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerBitSetFactory.java
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.LONG_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_B64LE;
+
+import java.lang.reflect.Field;
+import java.util.BitSet;
+
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>BitSet</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerBitSetFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        if (memberInfoOrNull != null)
+        {
+            return (clazz == BitSet.class)
+                    && memberInfoOrNull.getType().getDataClass() == HDF5DataClass.BITFIELD;
+        } else
+        {
+            return (clazz == BitSet.class);
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(final AccessType accessType, final Field fieldOrNull,
+            final HDF5CompoundMemberMapping member,
+            final HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, final Class<?> memberClazz, final int index,
+            final int offset, int memOffset, final IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final int memberTypeLengthInLongs;
+        if (compoundMemberInfoOrNull == null)
+        {
+            final int memberTypeLengthInBits = member.getMemberTypeLength();
+            memberTypeLengthInLongs =
+                    memberTypeLengthInBits / 64 + (memberTypeLengthInBits % 64 != 0 ? 1 : 0);
+        } else
+        {
+            memberTypeLengthInLongs = compoundMemberInfoOrNull.getType().getNumberOfElements();
+        }
+
+        if (memberTypeLengthInLongs <= 0)
+        {
+            throw new IllegalArgumentException(
+                    "Length of a bit field must be a positive number (len="
+                            + memberTypeLengthInLongs + ").");
+        }
+        final int storageTypeId = member.getStorageDataTypeId();
+        final int memberTypeId =
+                (storageTypeId < 0) ? fileInfoProvider.getArrayTypeId(H5T_STD_B64LE,
+                        memberTypeLengthInLongs) : storageTypeId;
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        memberTypeLengthInLongs, memberTypeId, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset,
+                        memberTypeLengthInLongs, memberTypeId, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset,
+                        memberTypeLengthInLongs, memberTypeId, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset,
+                        memberTypeLengthInLongs, memberTypeId, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int memberTypeLengthInLongs,
+            final int memberTypeId, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, memberTypeLengthInLongs * LONG_SIZE,
+                offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final BitSet bs = (BitSet) field.get(obj);
+                    return HDFNativeData.longToByte(BitSetConversionUtils.toStorageForm(bs));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final BitSet bs =
+                            BitSetConversionUtils.fromStorageForm(HDFNativeData.byteToLong(byteArr,
+                                    arrayOffset + offsetInMemory, memberTypeLengthInLongs));
+                    field.set(obj, bs);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int memberTypeLengthInLongs, final int memberTypeId,
+            final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, memberTypeLengthInLongs * LONG_SIZE,
+                offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final BitSet bs = (BitSet) getMap(obj, memberName);
+                    return HDFNativeData.longToByte(BitSetConversionUtils.toStorageForm(bs));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final BitSet bitSet =
+                            BitSetConversionUtils.fromStorageForm(HDFNativeData.byteToLong(byteArr,
+                                    arrayOffset + offsetInMemory, memberTypeLengthInLongs));
+                    putMap(obj, memberName, bitSet);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int memberTypeLengthInLongs,
+            final int memberTypeId, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, memberTypeLengthInLongs * LONG_SIZE,
+                offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final BitSet bs = (BitSet) getList(obj, index);
+                    return HDFNativeData.longToByte(BitSetConversionUtils.toStorageForm(bs));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final BitSet bitSet =
+                            BitSetConversionUtils.fromStorageForm(HDFNativeData.byteToLong(byteArr,
+                                    arrayOffset + offsetInMemory, memberTypeLengthInLongs));
+                    setList(obj, index, bitSet);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int memberTypeLengthInLongs,
+            final int memberTypeId, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, memberTypeLengthInLongs * LONG_SIZE,
+                offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final BitSet bs = (BitSet) getArray(obj, index);
+                    return HDFNativeData.longToByte(BitSetConversionUtils.toStorageForm(bs));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final BitSet bitSet =
+                            BitSetConversionUtils.fromStorageForm(HDFNativeData.byteToLong(byteArr,
+                                    arrayOffset + offsetInMemory, memberTypeLengthInLongs));
+                    setArray(obj, index, bitSet);
+                }
+            };
+    }
+
+}
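
Side note on createBytifyer above: when no compound member information is available, the member length is converted from a bit count to the number of 64-bit storage words by ceiling division. A standalone check of that arithmetic, using the same expression as the diff; the demo class name is made up:

    public class BitsToLongsDemo
    {
        // Same expression as in createBytifyer: number of 64-bit longs
        // needed to hold the given number of bits.
        static int lengthInLongs(int lengthInBits)
        {
            return lengthInBits / 64 + (lengthInBits % 64 != 0 ? 1 : 0);
        }

        public static void main(String[] args)
        {
            System.out.println(lengthInLongs(1));   // 1
            System.out.println(lengthInLongs(64));  // 1
            System.out.println(lengthInLongs(65));  // 2
            System.out.println(lengthInLongs(128)); // 2
        }
    }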
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerBooleanFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerBooleanFactory.java
new file mode 100644
index 0000000..9cd255f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerBooleanFactory.java
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+
+import java.lang.reflect.Field;
+
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for
+ * <code>boolean</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerBooleanFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        if (memberInfoOrNull != null)
+        {
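+            // A Java boolean can map to a genuine HDF5 boolean member or to an integer-typed one.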
+            final HDF5DataClass dataClass = memberInfoOrNull.getType().getDataClass();
+            return (clazz == boolean.class)
+                    && (dataClass == HDF5DataClass.BOOLEAN || dataClass == HDF5DataClass.INTEGER);
+        } else
+        {
+            return (clazz == boolean.class);
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(final AccessType accessType, final Field fieldOrNull,
+            final HDF5CompoundMemberMapping member,
+            final HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, final Class<?> memberClazz, final int index,
+            final int offset, int memOffset, final IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        // May be -1 if not known
+        final int memberTypeId = member.getStorageDataTypeId();
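+        // Fall back to the file-level boolean data type when the mapping carries no storage type.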
+        final int booleanDataTypeId =
+                (memberTypeId < 0) ? fileInfoProvider.getBooleanDataTypeId() : memberTypeId;
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        booleanDataTypeId, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, booleanDataTypeId,
+                        member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, booleanDataTypeId,
+                        member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset,
+                        booleanDataTypeId, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int booleanDataTypeId,
+            final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, 1, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return booleanDataTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return HDFNativeData.byteToByte((byte) (field.getBoolean(obj) ? 1 : 0));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final boolean value = byteArr[arrayOffset + offsetInMemory] != 0;
+                    field.setBoolean(obj, value);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int booleanDataTypeId, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, 1, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return booleanDataTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return HDFNativeData.byteToByte((byte) (((Boolean) getMap(obj, memberName)) ? 1
+                            : 0));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final boolean value = byteArr[arrayOffset + offsetInMemory] != 0;
+                    putMap(obj, memberName, value);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int booleanDataTypeId,
+            final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, 1, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return booleanDataTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return HDFNativeData
+                            .byteToByte((byte) (((Boolean) getList(obj, index)) ? 1 : 0));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final boolean value = byteArr[arrayOffset + offsetInMemory] != 0;
+                    setList(obj, index, value);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int booleanDataTypeId,
+            final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, 1, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return booleanDataTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return HDFNativeData.byteToByte((byte) (((Boolean) getArray(obj, index)) ? 1
+                            : 0));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final boolean value = byteArr[arrayOffset + offsetInMemory] != 0;
+                    setArray(obj, index, value);
+                }
+            };
+    }
+
+}
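
The boolean encoding above is one byte per member: byteify() writes 1 for true and 0 for
false, and setFromByteArray() reads any non-zero byte as true. A minimal sketch of that
contract (the helper names here are illustrative, not part of the library):

    public class BooleanByteifySketch
    {
        // Write side, as in byteify() above.
        static byte toByte(boolean value)
        {
            return (byte) (value ? 1 : 0);
        }

        // Read side, as in setFromByteArray() above: non-zero means true.
        static boolean fromByte(byte b)
        {
            return b != 0;
        }

        public static void main(String[] args)
        {
            System.out.println(fromByte(toByte(true)));  // prints: true
            System.out.println(fromByte(toByte(false))); // prints: false
            System.out.println(fromByte((byte) 2));      // prints: true
        }
    }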
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerByteFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerByteFactory.java
new file mode 100644
index 0000000..be9c8bf
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerByteFactory.java
@@ -0,0 +1,534 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I8LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U8LE;
+
+import java.lang.reflect.Field;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>byte</code>,
+ * <code>byte[]</code>, <code>byte[][]</code> and <code>MDByteArray</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerByteFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static final Map<Class<?>, Rank> classToRankMap =
+            new IdentityHashMap<Class<?>, Rank>();
+
+    private enum Rank
+    {
+        SCALAR(byte.class, 0), ARRAY1D(byte[].class, 1), ARRAY2D(byte[][].class, 2), ARRAYMD(
+                MDByteArray.class, -1);
+
+        private final Class<?> clazz;
+
+        private final int rank;
+
+        Rank(Class<?> clazz, int rank)
+        {
+            this.clazz = clazz;
+            this.rank = rank;
+        }
+
+        int getRank()
+        {
+            return rank;
+        }
+
+        boolean isScalar()
+        {
+            return rank == 0;
+        }
+
+        boolean anyRank()
+        {
+            return rank == -1;
+        }
+
+        Class<?> getClazz()
+        {
+            return clazz;
+        }
+    }
+
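+    // Fill the class-to-rank lookup once; createBytifyer() dispatches on it.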
+    static
+    {
+        for (Rank r : Rank.values())
+        {
+            classToRankMap.put(r.getClazz(), r);
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        final Rank rankOrNull = classToRankMap.get(clazz);
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataTypeInformation typeInfo = memberInfoOrNull.getType();
+            if (rankOrNull == null || typeInfo.getDataClass() != HDF5DataClass.INTEGER
+                    || typeInfo.getElementSize() != 1)
+            {
+                return false;
+            }
+            return rankOrNull.anyRank()
+                    || (rankOrNull.getRank() == typeInfo.getDimensions().length)
+                    || (rankOrNull.isScalar() && typeInfo.getDimensions().length == 1 && typeInfo
+                            .getDimensions()[0] == 1);
+
+        } else
+        {
+            return rankOrNull != null;
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final Rank rank = classToRankMap.get(memberClazz);
+        final int len =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : rank.isScalar() ? 1 : member.getMemberTypeLength();
+        final int[] dimensions = rank.isScalar() ? new int[]
+            { 1 } : member.getMemberTypeDimensions();
+        final int storageTypeId = member.getStorageDataTypeId();
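+        // Scalars use a fixed 1-byte integer type (signed or unsigned as mapped); arrays reuse
+        // an explicitly given storage type or request a matching array type from the file.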
+        final int memberTypeId =
+                rank.isScalar() ? (member.isUnsigned() ? H5T_STD_U8LE : H5T_STD_I8LE)
+                        : ((storageTypeId < 0) ? fileInfoProvider.getArrayTypeId(
+                                member.isUnsigned() ? H5T_STD_U8LE : H5T_STD_I8LE, dimensions)
+                                : storageTypeId);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        dimensions, len, memberTypeId, rank, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, dimensions, len,
+                        memberTypeId, rank, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, len, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.byteToByte(field.getByte(obj));
+                        case ARRAY1D:
+                            return (byte[]) field.get(obj);
+                        case ARRAY2D:
+                        {
+                            final byte[][] array = (byte[][]) field.get(obj);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return MatrixUtils.flatten(array);
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDByteArray array = (MDByteArray) field.get(obj);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return array.getAsFlatArray();
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            field.setByte(obj, byteArr[arrayOffset + offsetInMemory]);
+                            break;
+                        case ARRAY1D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            field.set(obj, array);
+                            break;
+                        }
+                        case ARRAY2D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            field.set(obj, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            field.set(obj, new MDByteArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int[] dimensions, final int len, final int memberTypeId,
+            final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, len, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.byteToByte((((Number) getMap(obj, memberName))
+                                    .byteValue()));
+                        case ARRAY1D:
+                            return (byte[]) getMap(obj, memberName);
+                        case ARRAY2D:
+                        {
+                            final byte[][] array = (byte[][]) getMap(obj, memberName);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return MatrixUtils.flatten(array);
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDByteArray array = (MDByteArray) getMap(obj, memberName);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return array.getAsFlatArray();
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            putMap(obj, memberName, byteArr[arrayOffset + offsetInMemory]);
+                            break;
+                        case ARRAY1D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            putMap(obj, memberName, array);
+                            break;
+                        }
+                        case ARRAY2D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            putMap(obj, memberName, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            putMap(obj, memberName, new MDByteArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, len, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.byteToByte((((Number) getList(obj, index))
+                                    .byteValue()));
+                        case ARRAY1D:
+                            return (byte[]) getList(obj, index);
+                        case ARRAY2D:
+                        {
+                            final byte[][] array = (byte[][]) getList(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return MatrixUtils.flatten(array);
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDByteArray array = (MDByteArray) getList(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return array.getAsFlatArray();
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setList(obj, index, byteArr[arrayOffset + offsetInMemory]);
+                            break;
+                        case ARRAY1D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            setList(obj, index, array);
+                            break;
+                        }
+                        case ARRAY2D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            setList(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            setList(obj, index, new MDByteArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, len, offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 1;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.byteToByte((((Number) getArray(obj, index))
+                                    .byteValue()));
+                        case ARRAY1D:
+                            return (byte[]) getArray(obj, index);
+                        case ARRAY2D:
+                        {
+                            final byte[][] array = (byte[][]) getArray(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return MatrixUtils.flatten(array);
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDByteArray array = (MDByteArray) getArray(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return array.getAsFlatArray();
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setArray(obj, index, byteArr[arrayOffset + offsetInMemory]);
+                            break;
+                        case ARRAY1D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            setArray(obj, index, array);
+                            break;
+                        }
+                        case ARRAY2D:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            setArray(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final byte[] array = new byte[len];
+                            System.arraycopy(byteArr, arrayOffset + offsetInMemory, array, 0,
+                                    array.length);
+                            setArray(obj, index, new MDByteArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+}
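
Every rank branch above reduces the member to a flat byte[] on write and re-shapes it on
read; the 2D case delegates to MatrixUtils.flatten() and MatrixUtils.shapen(). A sketch of
the row-major layout this implies; the helpers below mirror those calls under that
assumption and omit the library's dimension checks:

    public class ByteMatrixLayoutSketch
    {
        // Row-major flatten of a rectangular matrix, as the ARRAY2D write path implies.
        static byte[] flatten(byte[][] matrix)
        {
            final int rows = matrix.length;
            final int cols = (rows == 0) ? 0 : matrix[0].length;
            final byte[] flat = new byte[rows * cols];
            for (int i = 0; i < rows; ++i)
            {
                System.arraycopy(matrix[i], 0, flat, i * cols, cols);
            }
            return flat;
        }

        // Inverse re-shape for dimensions { rows, cols }, as the ARRAY2D read path implies.
        static byte[][] shapen(byte[] flat, int[] dims)
        {
            final byte[][] matrix = new byte[dims[0]][dims[1]];
            for (int i = 0; i < dims[0]; ++i)
            {
                System.arraycopy(flat, i * dims[1], matrix[i], 0, dims[1]);
            }
            return matrix;
        }

        public static void main(String[] args)
        {
            final byte[][] m = { { 1, 2, 3 }, { 4, 5, 6 } };
            final byte[][] back = shapen(flatten(m), new int[] { 2, 3 });
            System.out.println(back[1][2]); // prints: 6
        }
    }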
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerDateFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerDateFactory.java
new file mode 100644
index 0000000..c620564
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerDateFactory.java
@@ -0,0 +1,297 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.LONG_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;
+
+import java.lang.reflect.Field;
+import java.util.Date;
+
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>Date</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerDateFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        if (memberInfoOrNull != null)
+        {
+            return (clazz == Date.class || Long.class.isAssignableFrom(clazz))
+                    && memberInfoOrNull.getType().isTimeStamp();
+        } else
+        {
+            return (clazz == Date.class);
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        if (dataClass == HDF5DataClass.INTEGER
+                && rank == 0
+                && elementSize == 8
+                && typeVariantOrNull == HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH)
+        {
+            return java.util.Date.class;
+        } else
+        {
+            return null;
+        }
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
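+        // Without an explicit type variant, default to milliseconds since the start of the epoch.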
+        final HDF5DataTypeVariant typeVariant =
+                HDF5DataTypeVariant.isTypeVariant(member.tryGetTypeVariant()) ? member
+                        .tryGetTypeVariant()
+                        : HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH;
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        typeVariant);
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, typeVariant);
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, typeVariant);
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset,
+                        typeVariant);
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return HDFNativeData.longToByte(((java.util.Date) field.get(obj)).getTime());
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    field.set(
+                            obj,
+                            new java.util.Date(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory)));
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object dateObj = getMap(obj, memberName);
+                    if (dateObj instanceof java.util.Date)
+                    {
+                        return HDFNativeData.longToByte(((java.util.Date) dateObj).getTime());
+                    } else
+                    {
+                        return HDFNativeData.longToByte(((Long) dateObj));
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    putMap(obj,
+                            memberName,
+                            new java.util.Date(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory)));
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object dateObj = getList(obj, index);
+                    if (dateObj instanceof java.util.Date)
+                    {
+                        return HDFNativeData.longToByte(((java.util.Date) dateObj).getTime());
+                    } else
+                    {
+                        return HDFNativeData.longToByte(((Long) dateObj));
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    setList(obj,
+                            index,
+                            new java.util.Date(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory)));
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object dateObj = getArray(obj, index);
+                    if (dateObj instanceof java.util.Date)
+                    {
+                        return HDFNativeData.longToByte(((java.util.Date) dateObj).getTime());
+                    } else
+                    {
+                        return HDFNativeData.longToByte(((Long) dateObj));
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    setArray(
+                            obj,
+                            index,
+                            new java.util.Date(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory)));
+                }
+            };
+    }
+
+}
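
A Date member is stored as its getTime() value, an 8-byte integer with storage type
H5T_STD_I64LE. A sketch of the equivalent round trip; note that HDFNativeData converts via
the JVM's native byte order, so the fixed little-endian order below is an assumption that
matches the declared storage type rather than the in-memory form:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.util.Date;

    public class DateByteifySketch
    {
        public static void main(String[] args)
        {
            final Date date = new Date(1000000000000L); // epoch milliseconds
            // Write side: the long time stamp as 8 little-endian bytes.
            final byte[] bytes = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN)
                    .putLong(date.getTime()).array();
            // Read side: reconstruct the Date from those bytes.
            final long millis = ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).getLong();
            System.out.println(new Date(millis).equals(date)); // prints: true
        }
    }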
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerDoubleFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerDoubleFactory.java
new file mode 100644
index 0000000..ef206e0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerDoubleFactory.java
@@ -0,0 +1,520 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.DOUBLE_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F64LE;
+
+import java.lang.reflect.Field;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>double</code>
+ * , <code>double[]</code>, <code>double[][]</code> and <code>MDDoubleArray</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerDoubleFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static final Map<Class<?>, Rank> classToRankMap =
+            new IdentityHashMap<Class<?>, Rank>();
+
+    private enum Rank
+    {
+        SCALAR(double.class, 0), ARRAY1D(double[].class, 1), ARRAY2D(double[][].class, 2), ARRAYMD(
+                MDDoubleArray.class, -1);
+
+        private final Class<?> clazz;
+
+        private final int rank;
+
+        Rank(Class<?> clazz, int rank)
+        {
+            this.clazz = clazz;
+            this.rank = rank;
+        }
+
+        int getRank()
+        {
+            return rank;
+        }
+
+        boolean isScalar()
+        {
+            return rank == 0;
+        }
+
+        boolean anyRank()
+        {
+            return rank == -1;
+        }
+
+        Class<?> getClazz()
+        {
+            return clazz;
+        }
+    }
+
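+    // Same rank bookkeeping as the byte factory: fill the lookup once, then dispatch on it.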
+    static
+    {
+        for (Rank r : Rank.values())
+        {
+            classToRankMap.put(r.getClazz(), r);
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        final Rank rankOrNull = classToRankMap.get(clazz);
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataTypeInformation typeInfo = memberInfoOrNull.getType();
+            if (rankOrNull == null || typeInfo.getDataClass() != HDF5DataClass.FLOAT
+                    || typeInfo.getElementSize() != DOUBLE_SIZE)
+            {
+                return false;
+            }
+            return rankOrNull.anyRank()
+                    || (rankOrNull.getRank() == typeInfo.getDimensions().length)
+                    || (rankOrNull.isScalar() && typeInfo.getDimensions().length == 1 && typeInfo
+                            .getDimensions()[0] == 1);
+
+        } else
+        {
+            return rankOrNull != null;
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final Rank rank = classToRankMap.get(memberClazz);
+        final int len =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : rank.isScalar() ? 1 : member.getMemberTypeLength();
+        final int[] dimensions = rank.isScalar() ? new int[]
+            { 1 } : member.getMemberTypeDimensions();
+        final int storageTypeId = member.getStorageDataTypeId();
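+        // Scalars use the 8-byte IEEE little-endian float type; arrays reuse an explicitly
+        // given storage type or request a matching array type from the file.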
+        final int memberTypeId =
+                rank.isScalar() ? H5T_IEEE_F64LE : ((storageTypeId < 0) ? fileInfoProvider
+                        .getArrayTypeId(H5T_IEEE_F64LE, dimensions) : storageTypeId);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        dimensions, len, memberTypeId, rank, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, dimensions, len,
+                        memberTypeId, rank, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, DOUBLE_SIZE * len, offset,
+                memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.doubleToByte(field.getDouble(obj));
+                        case ARRAY1D:
+                            return HDFNativeData.doubleToByte((double[]) field.get(obj));
+                        case ARRAY2D:
+                        {
+                            final double[][] array = (double[][]) field.get(obj);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDDoubleArray array = (MDDoubleArray) field.get(obj);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            field.setDouble(obj,
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            field.set(obj, HDFNativeData.byteToDouble(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            field.set(obj, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            field.set(obj, new MDDoubleArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int[] dimensions, final int len, final int memberTypeId,
+            final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, DOUBLE_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.doubleToByte(((Number) getMap(obj, memberName))
+                                    .doubleValue());
+                        case ARRAY1D:
+                            return HDFNativeData.doubleToByte((double[]) getMap(obj, memberName));
+                        case ARRAY2D:
+                        {
+                            final double[][] array = (double[][]) getMap(obj, memberName);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDDoubleArray array = (MDDoubleArray) getMap(obj, memberName);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            putMap(obj, memberName, HDFNativeData.byteToDouble(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            putMap(obj, memberName, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            putMap(obj, memberName, new MDDoubleArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, DOUBLE_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.doubleToByte(((Number) getList(obj, index))
+                                    .doubleValue());
+                        case ARRAY1D:
+                            return HDFNativeData.doubleToByte((double[]) getList(obj, index));
+                        case ARRAY2D:
+                        {
+                            final double[][] array = (double[][]) getList(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDDoubleArray array = (MDDoubleArray) getList(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setList(obj, index,
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setList(obj, index, HDFNativeData.byteToDouble(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            setList(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            setList(obj, index, new MDDoubleArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, DOUBLE_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.doubleToByte(((Number) getArray(obj, index))
+                                    .doubleValue());
+                        case ARRAY1D:
+                            return HDFNativeData.doubleToByte((double[]) getArray(obj, index));
+                        case ARRAY2D:
+                        {
+                            final double[][] array = (double[][]) getArray(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDDoubleArray array = (MDDoubleArray) getArray(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.doubleToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setArray(obj, index, HDFNativeData.byteToDouble(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            setArray(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final double[] array =
+                                    HDFNativeData.byteToDouble(byteArr, arrayOffset + offsetInMemory,
+                                            len);
+                            setArray(obj, index, new MDDoubleArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerEnumArrayFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerEnumArrayFactory.java
new file mode 100644
index 0000000..d7cb0a0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerEnumArrayFactory.java
@@ -0,0 +1,444 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints.getEnumReturnType;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundMemberByteifyerEnumFactory.getEnumReturnTypeFromField;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints.EnumReturnType;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for
+ * <code>HDF5EnumerationValueArray</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerEnumArrayFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
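+        // With HDF5 member information at hand, accept any Java type that can represent an
+        // enum array (HDF5EnumerationValueArray, Java enum arrays, String[], numeric arrays),
+        // but only if the member's on-disk data class really is ENUM. Without that
+        // information, accept only the unambiguous types.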
+        if (memberInfoOrNull != null)
+        {
+            return ((clazz == HDF5EnumerationValueArray.class)
+                    || (clazz.isArray() && clazz.getComponentType().isEnum())
+                    || clazz == String[].class || (clazz.isArray() && (Number.class
+                    .isAssignableFrom(clazz.getComponentType()) || (clazz.getComponentType()
+                    .isPrimitive() && clazz.getComponentType() != boolean.class))))
+                    && memberInfoOrNull.getType().getDataClass() == HDF5DataClass.ENUM;
+        } else
+        {
+            return (clazz == HDF5EnumerationValueArray.class)
+                    || (clazz.isArray() && clazz.getComponentType().isEnum());
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(final AccessType accessType, final Field fieldOrNull,
+            final HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType compoundMemberInfoEnumTypeOrNull, Class<?> memberClazz,
+            final int index, final int offset, int memOffset, final IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        HDF5EnumerationType enumTypeOrNull =
+                member.tryGetEnumerationType() != null ? member.tryGetEnumerationType()
+                        : compoundMemberInfoEnumTypeOrNull;
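+        // If neither the member mapping nor the compound member information supplies an enum
+        // type, try to derive one by reflection from the field's Java enum component type.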
+        if (enumTypeOrNull == null)
+        {
+            if (fieldOrNull != null && fieldOrNull.getType().isArray()
+                    && fieldOrNull.getType().getComponentType().isEnum())
+            {
+                @SuppressWarnings("unchecked")
+                Class<? extends Enum<?>> enumClass =
+                        (Class<? extends Enum<?>>) fieldOrNull.getType().getComponentType();
+                enumTypeOrNull =
+                        fileInfoProvider.getEnumType(ReflectionUtils.getEnumOptions(enumClass));
+            } else
+            {
+                throw new HDF5JavaException("Enumeration type not known for member byteifyer.");
+            }
+        }
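+        // Determine the element count and the storage type id, creating an array type of the
+        // enum's storage form when no committed type id is available.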
+        final int memberTypeLength =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : member.getMemberTypeLength();
+        final int storageTypeId = member.getStorageDataTypeId();
+        final int memberStorageTypeId =
+                (storageTypeId < 0) ? fileInfoProvider.getArrayTypeId(
+                        enumTypeOrNull.getStorageTypeId(), memberTypeLength) : storageTypeId;
+        switch (accessType)
+        {
+            case FIELD:
+            {
+                if (fieldOrNull == null)
+                {
+                    throw new HDF5JavaException("No field for member " + memberName + ".");
+                }
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        enumTypeOrNull, memberTypeLength, memberStorageTypeId, member.tryGetTypeVariant(),
+                        getEnumReturnTypeFromField(fieldOrNull.getType()));
+            }
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, enumTypeOrNull,
+                        memberTypeLength, memberStorageTypeId, member.tryGetTypeVariant(),
+                        getEnumReturnType(member));
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, enumTypeOrNull,
+                        memberTypeLength, memberStorageTypeId, member.tryGetTypeVariant(),
+                        getEnumReturnType(member));
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset,
+                        enumTypeOrNull, memberTypeLength, memberStorageTypeId, member.tryGetTypeVariant(),
+                        getEnumReturnType(member));
+        }
+        throw new Error("Unknown access type");
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final HDF5EnumerationType enumType,
+            final int memberTypeLength, final int memberStorageTypeId, final HDF5DataTypeVariant typeVariant,
+            final EnumReturnType enumReturnType)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, enumType.getStorageForm()
+                .getStorageSize() * memberTypeLength, offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberStorageTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnumArray(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValueArray =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    memberTypeLength, enumReturnType, field);
+                    field.set(obj, enumValueArray);
+                }
+
+                private HDF5EnumerationValueArray getEnumArray(Object obj)
+                        throws IllegalAccessException, IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumArrayObj = field.get(obj);
+                    return getEnumArrayFromField(enumArrayObj, enumType, enumReturnType);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final HDF5EnumerationType enumType, final int memberTypeLength,
+            final int memberStorageTypeId, final HDF5DataTypeVariant typeVariant,
+            final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
+        return new HDF5MemberByteifyer(null, memberName, enumType.getStorageForm().getStorageSize()
+                * memberTypeLength, offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberStorageTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnumArray(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValueArray =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    memberTypeLength, enumReturnType, null);
+                    putMap(obj, memberName, enumValueArray);
+                }
+
+                private HDF5EnumerationValueArray getEnumArray(Object obj)
+                        throws IllegalAccessException, IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumArrayObj = getMap(obj, memberName);
+                    return guessEnumArray(enumArrayObj, enumType);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5EnumerationType enumType,
+            final int memberTypeLength, final int memberStorageTypeId, final HDF5DataTypeVariant typeVariant,
+            final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
+        return new HDF5MemberByteifyer(null, memberName, enumType.getStorageForm().getStorageSize()
+                * memberTypeLength, offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberStorageTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnumArray(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValueArray =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    memberTypeLength, enumReturnType, null);
+                    setList(obj, index, enumValueArray);
+                }
+
+                private HDF5EnumerationValueArray getEnumArray(Object obj)
+                        throws IllegalAccessException, IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumArrayObj = getList(obj, index);
+                    return guessEnumArray(enumArrayObj, enumType);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5EnumerationType enumType,
+            final int memberTypeLength, final int memberStorageTypeId, final HDF5DataTypeVariant typeVariant,
+            final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
+        return new HDF5MemberByteifyer(null, memberName, enumType.getStorageForm().getStorageSize()
+                * memberTypeLength, offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberStorageTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnumArray(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValueArray =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    memberTypeLength, enumReturnType, null);
+                    setArray(obj, index, enumValueArray);
+                }
+
+                private HDF5EnumerationValueArray getEnumArray(Object obj)
+                        throws IllegalAccessException, IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumArrayObj = getArray(obj, index);
+                    return guessEnumArray(enumArrayObj, enumType);
+                }
+            };
+    }
+
+    static HDF5EnumerationValueArray guessEnumArray(final Object enumArrayObj,
+            final HDF5EnumerationType enumType)
+    {
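+        // Accept the value in any supported representation (HDF5EnumerationValueArray, int[],
+        // String[] or a Java enum array); as a last resort, stringify each element.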
+        if (enumArrayObj instanceof HDF5EnumerationValueArray)
+        {
+            return (HDF5EnumerationValueArray) enumArrayObj;
+        } else if (enumArrayObj instanceof int[])
+        {
+            return new HDF5EnumerationValueArray(enumType, (int[]) enumArrayObj);
+        } else if (enumArrayObj instanceof String[])
+        {
+            return new HDF5EnumerationValueArray(enumType, (String[]) enumArrayObj);
+        } else if (enumArrayObj.getClass().isArray()
+                && enumArrayObj.getClass().getComponentType().isEnum())
+        {
+            return new HDF5EnumerationValueArray(enumType, (Enum<?>[]) enumArrayObj);
+        } else
+        {
+            final String[] options = new String[Array.getLength(enumArrayObj)];
+            for (int i = 0; i < options.length; ++i)
+            {
+                options[i] = Array.get(enumArrayObj, i).toString();
+            }
+            return new HDF5EnumerationValueArray(enumType, options);
+        }
+    }
+
+    static HDF5EnumerationValueArray getEnumArrayFromField(final Object enumArrayObj,
+            final HDF5EnumerationType enumType,
+            final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
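+        // The return type was derived from the field's declared type, so each case is a
+        // direct conversion without guessing.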
+        switch (enumReturnType)
+        {
+            case HDF5ENUMERATIONVALUE:
+                return (HDF5EnumerationValueArray) enumArrayObj;
+            case STRING:
+                return new HDF5EnumerationValueArray(enumType, (String[]) enumArrayObj);
+            case ORDINAL:
+                return new HDF5EnumerationValueArray(enumType, enumArrayObj);
+            case JAVAENUMERATION:
+            {
+                return new HDF5EnumerationValueArray(enumType, (Enum<?>[]) enumArrayObj);
+            }
+        }
+        throw new Error("Unknown EnumReturnType " + enumReturnType);
+    }
+
+    static Object getEnumValue(final HDF5EnumerationType enumType, byte[] byteArr, int arrayOffset,
+            final int length, final HDF5CompoundMappingHints.EnumReturnType enumReturnType,
+            Field fieldOrNull)
+    {
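+        // Decode the storage-form bytes into the representation requested by the caller;
+        // JAVAENUMERATION additionally needs the field to recover the concrete enum class.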
+        switch (enumReturnType)
+        {
+            case HDF5ENUMERATIONVALUE:
+                return HDF5EnumerationValueArray.fromStorageForm(enumType, byteArr, arrayOffset,
+                        length);
+            case STRING:
+                return HDF5EnumerationValueArray.fromStorageFormToStringArray(enumType, byteArr,
+                        arrayOffset, length);
+            case ORDINAL:
+                return HDF5EnumerationValueArray.fromStorageFormToIntArray(enumType, byteArr,
+                        arrayOffset, length);
+            case JAVAENUMERATION:
+            {
+                if (fieldOrNull == null)
+                {
+                    throw new HDF5JavaException(
+                            "JAVAENUMERATIONTYPE only available with access type FIELD");
+                }
+                final String[] values =
+                        HDF5EnumerationValueArray.fromStorageFormToStringArray(enumType, byteArr,
+                                arrayOffset, length);
+                @SuppressWarnings("unchecked")
+                final Class<Enum<?>> enumClass =
+                        (Class<Enum<?>>) fieldOrNull.getType().getComponentType();
+                final Enum<?>[] result =
+                        (Enum<?>[]) Array.newInstance(fieldOrNull.getType().getComponentType(),
+                                values.length);
+                for (int i = 0; i < result.length; ++i)
+                {
+                    result[i] = getValue(enumClass, values[i]);
+                }
+                return result;
+            }
+        }
+        throw new Error("Unknown EnumReturnType " + enumReturnType);
+    }
+
+    /**
+     * Returns the value as Enum of type <var>enumClass</var>.
+     */
+    @SuppressWarnings(
+        { "unchecked", "rawtypes" })
+    private static Enum<?> getValue(Class<? extends Enum<?>> enumClass, String value)
+    {
+        return Enum.valueOf((Class<Enum>) enumClass, value);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerEnumFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerEnumFactory.java
new file mode 100644
index 0000000..1bf81e1
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerEnumFactory.java
@@ -0,0 +1,423 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints.getEnumReturnType;
+
+import java.lang.reflect.Field;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints.EnumReturnType;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for
+ * <code>HDF5EnumerationValue</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerEnumFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        if (memberInfoOrNull != null)
+        {
+            return ((clazz == HDF5EnumerationValue.class) || clazz.isEnum()
+                    || clazz == String.class || Number.class.isAssignableFrom(clazz) || (clazz
+                    .isPrimitive() && clazz != boolean.class))
+                    && memberInfoOrNull.getType().getDataClass() == HDF5DataClass.ENUM;
+        } else
+        {
+            return (clazz == HDF5EnumerationValue.class) || clazz.isEnum();
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(final AccessType accessType, final Field fieldOrNull,
+            final HDF5CompoundMemberMapping member,
+            final HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType compoundMemberInfoEnumTypeOrNull, final Class<?> memberClazz,
+            final int index, final int offset, int memOffset, final IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final HDF5EnumerationType enumTypeOrNull =
+                member.tryGetEnumerationType() != null ? member.tryGetEnumerationType()
+                        : compoundMemberInfoEnumTypeOrNull;
+        if (enumTypeOrNull == null)
+        {
+            throw new HDF5JavaException("Enumeration type for member '" + memberName
+                    + "' not known for member byteifyer.");
+        }
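+        // Dispatch on how the compound member is accessed: bean field, map, list or array.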
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        enumTypeOrNull, member.tryGetTypeVariant(), getEnumReturnTypeFromField(fieldOrNull.getType()));
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, enumTypeOrNull,
+                        member.tryGetTypeVariant(), getEnumReturnType(member));
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, enumTypeOrNull,
+                        member.tryGetTypeVariant(), getEnumReturnType(member));
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset,
+                        enumTypeOrNull, member.tryGetTypeVariant(), getEnumReturnType(member));
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    /**
+     * Returns the desired enumeration return type for the given field type: numeric types and
+     * non-boolean primitives map to {@link EnumReturnType#ORDINAL}, <code>String</code> to
+     * {@link EnumReturnType#STRING}, Java enums to {@link EnumReturnType#JAVAENUMERATION}, and
+     * everything else to {@link EnumReturnType#HDF5ENUMERATIONVALUE}. For array types, the
+     * component type is considered.
+     */
+    static EnumReturnType getEnumReturnTypeFromField(Class<?> type)
+    {
+        final Class<?> clazz = type.isArray() ? type.getComponentType() : type;
+        if (Number.class.isAssignableFrom(clazz) || (clazz.isPrimitive() && clazz != boolean.class))
+        {
+            return EnumReturnType.ORDINAL;
+        } else if (String.class == clazz)
+        {
+            return EnumReturnType.STRING;
+        } else if (clazz.isEnum())
+        {
+            return EnumReturnType.JAVAENUMERATION;
+        } else
+        {
+            return EnumReturnType.HDF5ENUMERATIONVALUE;
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final HDF5EnumerationType enumType,
+            final HDF5DataTypeVariant typeVariant, final EnumReturnType enumReturnType)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, enumType.getStorageForm()
+                .getStorageSize(), offset, memOffset, false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return enumType.getStorageTypeId();
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return enumType.getNativeTypeId();
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnum(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValue =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    enumReturnType, field);
+                    field.set(obj, enumValue);
+                }
+
+                private HDF5EnumerationValue getEnum(Object obj) throws IllegalAccessException,
+                        IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumObj = field.get(obj);
+                    if (enumObj instanceof HDF5EnumerationValue)
+                    {
+                        return (HDF5EnumerationValue) enumObj;
+                    } else if (enumObj instanceof Number)
+                    {
+                        return new HDF5EnumerationValue(enumType, ((Number) enumObj).intValue());
+                    } else
+                    {
+                        return new HDF5EnumerationValue(enumType, enumObj.toString());
+                    }
+                }
+
+                @Override
+                HDF5EnumerationType tryGetEnumType()
+                {
+                    return enumType;
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final HDF5EnumerationType enumType, final HDF5DataTypeVariant typeVariant,
+            final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
+        return new HDF5MemberByteifyer(null, memberName,
+                enumType.getStorageForm().getStorageSize(), offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return enumType.getStorageTypeId();
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return enumType.getNativeTypeId();
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnum(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValue =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    enumReturnType, null);
+                    putMap(obj, memberName, enumValue);
+                }
+
+                private HDF5EnumerationValue getEnum(Object obj) throws IllegalAccessException,
+                        IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumObj = getMap(obj, memberName);
+                    if (enumObj instanceof HDF5EnumerationValue)
+                    {
+                        return (HDF5EnumerationValue) enumObj;
+                    } else if (enumObj instanceof Number)
+                    {
+                        return new HDF5EnumerationValue(enumType, ((Number) enumObj).intValue());
+                    } else
+                    {
+                        return new HDF5EnumerationValue(enumType, enumObj.toString());
+                    }
+                }
+
+                @Override
+                HDF5EnumerationType tryGetEnumType()
+                {
+                    return enumType;
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5EnumerationType enumType,
+            final HDF5DataTypeVariant typeVariant, final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
+        return new HDF5MemberByteifyer(null, memberName,
+                enumType.getStorageForm().getStorageSize(), offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return enumType.getStorageTypeId();
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return enumType.getNativeTypeId();
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnum(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValue =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    enumReturnType, null);
+                    setList(obj, index, enumValue);
+                }
+
+                private HDF5EnumerationValue getEnum(Object obj) throws IllegalAccessException,
+                        IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumObj = getList(obj, index);
+                    if (enumObj instanceof HDF5EnumerationValue)
+                    {
+                        return (HDF5EnumerationValue) enumObj;
+                    } else if (enumObj instanceof Number)
+                    {
+                        return new HDF5EnumerationValue(enumType, ((Number) enumObj).intValue());
+                    } else
+                    {
+                        return new HDF5EnumerationValue(enumType, enumObj.toString());
+                    }
+                }
+
+                @Override
+                HDF5EnumerationType tryGetEnumType()
+                {
+                    return enumType;
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5EnumerationType enumType,
+            final HDF5DataTypeVariant typeVariant, final HDF5CompoundMappingHints.EnumReturnType enumReturnType)
+    {
+        return new HDF5MemberByteifyer(null, memberName,
+                enumType.getStorageForm().getStorageSize(), offset, memOffset, false,
+                typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return enumType.getStorageForm().getStorageSize();
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return enumType.getStorageTypeId();
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return enumType.getNativeTypeId();
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    return getEnum(obj).toStorageForm();
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final Object enumValue =
+                            getEnumValue(enumType, byteArr, arrayOffset + offsetInMemory,
+                                    enumReturnType, null);
+                    setArray(obj, index, enumValue);
+                }
+
+                private HDF5EnumerationValue getEnum(Object obj) throws IllegalAccessException,
+                        IllegalArgumentException
+                {
+                    assert obj != null;
+                    final Object enumObj = getArray(obj, index);
+                    if (enumObj instanceof HDF5EnumerationValue)
+                    {
+                        return (HDF5EnumerationValue) enumObj;
+                    } else if (enumObj instanceof Number)
+                    {
+                        return new HDF5EnumerationValue(enumType, ((Number) enumObj).intValue());
+                    } else
+                    {
+                        return new HDF5EnumerationValue(enumType, enumObj.toString());
+                    }
+                }
+
+                @Override
+                HDF5EnumerationType tryGetEnumType()
+                {
+                    return enumType;
+                }
+            };
+    }
+
+    @SuppressWarnings(
+        { "rawtypes", "unchecked" })
+    static Object getEnumValue(final HDF5EnumerationType enumType, byte[] byteArr, int arrayOffset,
+            final HDF5CompoundMappingHints.EnumReturnType enumReturnType, Field fieldOrNull)
+    {
+        switch (enumReturnType)
+        {
+            case HDF5ENUMERATIONVALUE:
+                return enumType.createFromStorageForm(byteArr, arrayOffset);
+            case STRING:
+                return enumType.createStringFromStorageForm(byteArr, arrayOffset);
+            case ORDINAL:
+                return enumType.getOrdinalFromStorageForm(byteArr, arrayOffset);
+            case JAVAENUMERATION:
+            {
+                if (fieldOrNull == null)
+                {
+                    throw new HDF5JavaException(
+                            "JAVAENUMERATIONTYPE only available with access type FIELD");
+                }
+                final String value = enumType.createStringFromStorageForm(byteArr, arrayOffset);
+                final Class<Enum> enumClass = (Class<Enum>) fieldOrNull.getType();
+                return Enum.valueOf(enumClass, value);
+            }
+        }
+        throw new Error("Unknown EnumReturnType " + enumReturnType);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerFloatFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerFloatFactory.java
new file mode 100644
index 0000000..addfadb
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerFloatFactory.java
@@ -0,0 +1,519 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.FLOAT_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F32LE;
+
+import java.lang.reflect.Field;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>float</code>,
+ * <code>float[]</code>, <code>float[][]</code> and <code>MDFloatArray</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerFloatFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static Map<Class<?>, Rank> classToRankMap = new IdentityHashMap<Class<?>, Rank>();
+
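+    // Associates each supported Java type with its array rank; -1 (MDFloatArray) matches any rank.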
+    private enum Rank
+    {
+        SCALAR(float.class, 0), ARRAY1D(float[].class, 1), ARRAY2D(float[][].class, 2), ARRAYMD(
+                MDFloatArray.class, -1);
+
+        private final Class<?> clazz;
+
+        private final int rank;
+
+        Rank(Class<?> clazz, int rank)
+        {
+            this.clazz = clazz;
+            this.rank = rank;
+        }
+
+        int getRank()
+        {
+            return rank;
+        }
+
+        boolean isScalar()
+        {
+            return rank == 0;
+        }
+
+        boolean anyRank()
+        {
+            return rank == -1;
+        }
+
+        Class<?> getClazz()
+        {
+            return clazz;
+        }
+    }
+
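+    // Populate the class-to-rank lookup table once at class-load time.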
+    static
+    {
+        for (Rank r : Rank.values())
+        {
+            classToRankMap.put(r.getClazz(), r);
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
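+        // Handle the member if its Java type maps to a known rank and, when HDF5 member
+        // information is available, the on-disk data class is FLOAT or INTEGER with matching
+        // dimensions (a scalar also matches a one-element array).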
+        final Rank rankOrNull = classToRankMap.get(clazz);
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataTypeInformation typeInfo = memberInfoOrNull.getType();
+            if (rankOrNull == null
+                    || (typeInfo.getDataClass() != HDF5DataClass.FLOAT && typeInfo.getDataClass() != HDF5DataClass.INTEGER))
+            {
+                return false;
+            }
+            return rankOrNull.anyRank()
+                    || (rankOrNull.getRank() == typeInfo.getDimensions().length)
+                    || (rankOrNull.isScalar() && typeInfo.getDimensions().length == 1 && typeInfo
+                            .getDimensions()[0] == 1);
+        } else
+        {
+            return rankOrNull != null;
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final Rank rank = classToRankMap.get(memberClazz);
+        final int len =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : rank.isScalar() ? 1 : member.getMemberTypeLength();
+        final int[] dimensions = rank.isScalar() ? new int[]
+            { 1 } : member.getMemberTypeDimensions();
+        final int storageTypeId = member.getStorageDataTypeId();
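+        // Scalars use the predefined IEEE 32-bit float type directly; arrays reuse a committed
+        // storage type id when given, or else create an array type of H5T_IEEE_F32LE.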
+        final int memberTypeId =
+                rank.isScalar() ? H5T_IEEE_F32LE : ((storageTypeId < 0) ? fileInfoProvider
+                        .getArrayTypeId(H5T_IEEE_F32LE, dimensions) : storageTypeId);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        dimensions, len, memberTypeId, rank, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, dimensions, len,
+                        memberTypeId, rank, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, FLOAT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.floatToByte(field.getFloat(obj));
+                        case ARRAY1D:
+                            return HDFNativeData.floatToByte((float[]) field.get(obj));
+                        case ARRAY2D:
+                        {
+                            final float[][] array = (float[][]) field.get(obj);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDFloatArray array = (MDFloatArray) field.get(obj);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            field.setFloat(obj,
+                                    HDFNativeData.byteToFloat(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            field.set(obj, HDFNativeData.byteToFloat(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, new MDFloatArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int[] dimensions, final int len, final int memberTypeId,
+            final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, FLOAT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.floatToByte(((Number) getMap(obj, memberName))
+                                    .floatValue());
+                        case ARRAY1D:
+                            return HDFNativeData.floatToByte((float[]) getMap(obj, memberName));
+                        case ARRAY2D:
+                        {
+                            final float[][] array = (float[][]) getMap(obj, memberName);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDFloatArray array = (MDFloatArray) getMap(obj, memberName);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToFloat(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            putMap(obj, memberName, HDFNativeData.byteToFloat(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, new MDFloatArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, FLOAT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.floatToByte(((Number) getList(obj, index))
+                                    .floatValue());
+                        case ARRAY1D:
+                            return HDFNativeData.floatToByte((float[]) getList(obj, index));
+                        case ARRAY2D:
+                        {
+                            final float[][] array = (float[][]) getList(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDFloatArray array = (MDFloatArray) getList(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setList(obj, index,
+                                    HDFNativeData.byteToFloat(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setList(obj, index, HDFNativeData.byteToFloat(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, new MDFloatArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, FLOAT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.floatToByte(((Number) getArray(obj, index))
+                                    .floatValue());
+                        case ARRAY1D:
+                            return HDFNativeData.floatToByte((float[]) getArray(obj, index));
+                        case ARRAY2D:
+                        {
+                            final float[][] array = (float[][]) getArray(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDFloatArray array = (MDFloatArray) getArray(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.floatToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToFloat(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setArray(obj, index, HDFNativeData.byteToFloat(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final float[] array =
+                                    HDFNativeData
+                                            .byteToFloat(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, new MDFloatArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerHDF5TimeDurationFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerHDF5TimeDurationFactory.java
new file mode 100644
index 0000000..7f7ce43
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerHDF5TimeDurationFactory.java
@@ -0,0 +1,311 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.LONG_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;
+
+import java.lang.reflect.Field;
+
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for
+ * {@link HDF5TimeDuration}.
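+ * <p>
+ * A minimal sketch of a value class whose member this factory can byteify (the class and
+ * field names are illustrative, not part of the library):
+ * 
+ * <pre>
+ * class TimedRecord
+ * {
+ *     // stored on disk as a 64-bit integer tagged with a time-duration type variant
+ *     HDF5TimeDuration duration = new HDF5TimeDuration(10, HDF5TimeUnit.SECONDS);
+ * }
+ * </pre>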
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerHDF5TimeDurationFactory implements
+        IHDF5CompoundMemberBytifyerFactory
+{
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        if (memberInfoOrNull != null)
+        {
+            return (clazz == HDF5TimeDuration.class || Long.class.isAssignableFrom(clazz))
+                    && memberInfoOrNull.getType().isTimeDuration();
+        } else
+        {
+            return (clazz == HDF5TimeDuration.class);
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        if (dataClass == HDF5DataClass.INTEGER && rank == 0 && elementSize == 8
+                && typeVariantOrNull != null && typeVariantOrNull.isTimeDuration())
+        {
+            return HDF5TimeDuration.class;
+        } else
+        {
+            return null;
+        }
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
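+        // Fall back to microseconds when the member mapping does not carry a valid
+        // time unit variant.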
+        final HDF5DataTypeVariant typeVariant =
+                HDF5DataTypeVariant.isTypeVariant(member.tryGetTypeVariant()) ? member
+                        .tryGetTypeVariant() : HDF5DataTypeVariant.TIME_DURATION_MICROSECONDS;
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        typeVariant);
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, typeVariant);
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, typeVariant);
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset,
+                        typeVariant);
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                final HDF5TimeUnit timeUnit = HDF5DataTypeVariant.getTimeUnit(typeVariant);
+
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final HDF5TimeDuration duration = (HDF5TimeDuration) field.get(obj);
+                    return HDFNativeData.longToByte(timeUnit.convert(duration.getValue(),
+                            duration.getUnit()));
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    field.set(
+                            obj,
+                            new HDF5TimeDuration(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory), timeUnit));
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                final HDF5TimeUnit timeUnit = HDF5DataTypeVariant.getTimeUnit(typeVariant);
+
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object durationObj = getMap(obj, memberName);
+                    if (durationObj instanceof HDF5TimeDuration)
+                    {
+                        final HDF5TimeDuration duration = (HDF5TimeDuration) durationObj;
+                        return HDFNativeData.longToByte(timeUnit.convert(duration.getValue(),
+                                duration.getUnit()));
+                    } else
+                    {
+                        return HDFNativeData.longToByte(((Number) durationObj).longValue());
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    putMap(obj,
+                            memberName,
+                            new HDF5TimeDuration(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory), timeUnit));
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                final HDF5TimeUnit timeUnit = HDF5DataTypeVariant.getTimeUnit(typeVariant);
+
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object durationObj = getList(obj, index);
+                    if (durationObj instanceof HDF5TimeDuration)
+                    {
+                        final HDF5TimeDuration duration = (HDF5TimeDuration) durationObj;
+                        return HDFNativeData.longToByte(timeUnit.convert(duration.getValue(),
+                                duration.getUnit()));
+                    } else
+                    {
+                        return HDFNativeData.longToByte(((Number) durationObj).longValue());
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    setList(obj,
+                            index,
+                            new HDF5TimeDuration(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory), timeUnit));
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE, offset, memOffset,
+                false, typeVariant)
+            {
+                final HDF5TimeUnit timeUnit = HDF5DataTypeVariant.getTimeUnit(typeVariant);
+
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return H5T_STD_I64LE;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object durationObj = getArray(obj, index);
+                    if (durationObj instanceof HDF5TimeDuration)
+                    {
+                        final HDF5TimeDuration duration = (HDF5TimeDuration) durationObj;
+                        return HDFNativeData.longToByte(timeUnit.convert(duration.getValue(),
+                                duration.getUnit()));
+                    } else
+                    {
+                        return HDFNativeData.longToByte(((Number) durationObj).longValue());
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    setArray(
+                            obj,
+                            index,
+                            new HDF5TimeDuration(HDFNativeData.byteToLong(byteArr, arrayOffset
+                                    + offsetInMemory), timeUnit));
+                }
+            };
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerIntFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerIntFactory.java
new file mode 100644
index 0000000..897b57d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerIntFactory.java
@@ -0,0 +1,515 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.INT_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I32LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U32LE;
+
+import java.lang.reflect.Field;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>int</code>,
+ * <code>int[]</code>, <code>int[][]</code> and <code>MDIntArray</code>.
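+ * <p>
+ * A minimal sketch of the four member shapes handled, one per rank (the class and field
+ * names are illustrative only):
+ * 
+ * <pre>
+ * class Record
+ * {
+ *     int counter;         // SCALAR
+ *     int[] samples;       // ARRAY1D
+ *     int[][] grid;        // ARRAY2D
+ *     MDIntArray cube;     // ARRAYMD (any rank)
+ * }
+ * </pre>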
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerIntFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static Map<Class<?>, Rank> classToRankMap = new IdentityHashMap<Class<?>, Rank>();
+
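+    // Maps each supported Java type to its rank; -1 marks MDIntArray, which can represent
+    // an array of any rank.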
+    private enum Rank
+    {
+        SCALAR(int.class, 0), ARRAY1D(int[].class, 1), ARRAY2D(int[][].class, 2), ARRAYMD(
+                MDIntArray.class, -1);
+
+        private final Class<?> clazz;
+
+        private final int rank;
+
+        Rank(Class<?> clazz, int rank)
+        {
+            this.clazz = clazz;
+            this.rank = rank;
+        }
+
+        int getRank()
+        {
+            return rank;
+        }
+
+        boolean isScalar()
+        {
+            return rank == 0;
+        }
+
+        boolean anyRank()
+        {
+            return rank == -1;
+        }
+
+        Class<?> getClazz()
+        {
+            return clazz;
+        }
+    }
+
+    static
+    {
+        for (Rank r : Rank.values())
+        {
+            classToRankMap.put(r.getClazz(), r);
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        final Rank rankOrNull = classToRankMap.get(clazz);
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataTypeInformation typeInfo = memberInfoOrNull.getType();
+            if (rankOrNull == null
+                    || (typeInfo.getDataClass() != HDF5DataClass.INTEGER && typeInfo.getDataClass() != HDF5DataClass.FLOAT))
+            {
+                return false;
+            }
+            return rankOrNull.anyRank()
+                    || (rankOrNull.getRank() == typeInfo.getDimensions().length)
+                    || (rankOrNull.isScalar() && typeInfo.getDimensions().length == 1 && typeInfo
+                            .getDimensions()[0] == 1);
+
+        } else
+        {
+            return rankOrNull != null;
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final Rank rank = classToRankMap.get(memberClazz);
+        final int len =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : rank.isScalar() ? 1 : member.getMemberTypeLength();
+        final int[] dimensions = rank.isScalar() ? new int[]
+            { 1 } : member.getMemberTypeDimensions();
+        final int storageTypeId = member.getStorageDataTypeId();
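+        // Scalars use the predefined (un)signed 32-bit little-endian integer type; arrays
+        // reuse an explicitly declared storage type and otherwise ask the file access
+        // provider for a matching array type id.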
+        final int memberTypeId =
+                rank.isScalar() ? member.isUnsigned() ? H5T_STD_U32LE : H5T_STD_I32LE
+                        : ((storageTypeId < 0) ? fileInfoProvider.getArrayTypeId(
+                                member.isUnsigned() ? H5T_STD_U32LE : H5T_STD_I32LE, dimensions)
+                                : storageTypeId);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        dimensions, len, memberTypeId, rank, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, dimensions, len,
+                        memberTypeId, rank, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, INT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.intToByte(field.getInt(obj));
+                        case ARRAY1D:
+                            return HDFNativeData.intToByte((int[]) field.get(obj));
+                        case ARRAY2D:
+                        {
+                            final int[][] array = (int[][]) field.get(obj);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDIntArray array = (MDIntArray) field.get(obj);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            field.setInt(obj,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            field.set(obj,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, new MDIntArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int[] dimensions, final int len, final int memberTypeId,
+            final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, INT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.intToByte(((Number) getMap(obj, memberName))
+                                    .intValue());
+                        case ARRAY1D:
+                            return HDFNativeData.intToByte((int[]) getMap(obj, memberName));
+                        case ARRAY2D:
+                        {
+                            final int[][] array = (int[][]) getMap(obj, memberName);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDIntArray array = (MDIntArray) getMap(obj, memberName);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, new MDIntArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, INT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.intToByte(((Number) getList(obj, index))
+                                    .intValue());
+                        case ARRAY1D:
+                            return HDFNativeData.intToByte((int[]) getList(obj, index));
+                        case ARRAY2D:
+                        {
+                            final int[][] array = (int[][]) getList(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDIntArray array = (MDIntArray) getList(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setList(obj, index,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setList(obj, index,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, new MDIntArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, INT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 4;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.intToByte(((Number) getArray(obj, index))
+                                    .intValue());
+                        case ARRAY1D:
+                            return HDFNativeData.intToByte((int[]) getArray(obj, index));
+                        case ARRAY2D:
+                        {
+                            final int[][] array = (int[][]) getArray(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDIntArray array = (MDIntArray) getArray(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.intToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final int[] array =
+                                    HDFNativeData.byteToInt(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, new MDIntArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerLongFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerLongFactory.java
new file mode 100644
index 0000000..d6bc0e8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerLongFactory.java
@@ -0,0 +1,515 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.LONG_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;
+
+import java.lang.reflect.Field;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>long</code>,
+ * <code>long[]</code>, <code>long[][]</code> and <code>MDLongArray</code>.
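+ * <p>
+ * Handles the same four member shapes as the <code>int</code> factory, but with 64-bit
+ * storage (the class and field names below are illustrative only):
+ * 
+ * <pre>
+ * class Record
+ * {
+ *     long count;              // SCALAR
+ *     long[] offsets;          // ARRAY1D
+ *     long[][] table;          // ARRAY2D
+ *     MDLongArray hypercube;   // ARRAYMD (any rank)
+ * }
+ * </pre>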
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerLongFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static Map<Class<?>, Rank> classToRankMap = new IdentityHashMap<Class<?>, Rank>();
+
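+    // Maps each supported Java type to its rank; -1 marks MDLongArray (any rank).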
+    private enum Rank
+    {
+        SCALAR(long.class, 0), ARRAY1D(long[].class, 1), ARRAY2D(long[][].class, 2), ARRAYMD(
+                MDLongArray.class, -1);
+
+        private final Class<?> clazz;
+
+        private final int rank;
+
+        Rank(Class<?> clazz, int rank)
+        {
+            this.clazz = clazz;
+            this.rank = rank;
+        }
+
+        int getRank()
+        {
+            return rank;
+        }
+
+        boolean isScalar()
+        {
+            return rank == 0;
+        }
+
+        boolean anyRank()
+        {
+            return rank == -1;
+        }
+
+        Class<?> getClazz()
+        {
+            return clazz;
+        }
+    }
+
+    static
+    {
+        for (Rank r : Rank.values())
+        {
+            classToRankMap.put(r.getClazz(), r);
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        final Rank rankOrNull = classToRankMap.get(clazz);
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataTypeInformation typeInfo = memberInfoOrNull.getType();
+            if (rankOrNull == null || typeInfo.getDataClass() != HDF5DataClass.INTEGER
+                    || typeInfo.getElementSize() != LONG_SIZE)
+            {
+                return false;
+            }
+            return rankOrNull.anyRank()
+                    || (rankOrNull.getRank() == typeInfo.getDimensions().length)
+                    || (rankOrNull.isScalar() && typeInfo.getDimensions().length == 1 && typeInfo
+                            .getDimensions()[0] == 1);
+
+        } else
+        {
+            return rankOrNull != null;
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final Rank rank = classToRankMap.get(memberClazz);
+        final int len =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : rank.isScalar() ? 1 : member.getMemberTypeLength();
+        final int[] dimensions = rank.isScalar() ? new int[]
+            { 1 } : member.getMemberTypeDimensions();
+        final int storageTypeId = member.getStorageDataTypeId();
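+        // Same selection as in the int factory, using the 64-bit (un)signed little-endian
+        // integer types.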
+        final int memberTypeId =
+                rank.isScalar() ? member.isUnsigned() ? H5T_STD_U64LE : H5T_STD_I64LE
+                        : ((storageTypeId < 0) ? fileInfoProvider.getArrayTypeId(
+                                member.isUnsigned() ? H5T_STD_U64LE : H5T_STD_I64LE, dimensions)
+                                : storageTypeId);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        dimensions, len, memberTypeId, rank, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, dimensions, len,
+                        memberTypeId, rank, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, LONG_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.longToByte(field.getLong(obj));
+                        case ARRAY1D:
+                            return HDFNativeData.longToByte((long[]) field.get(obj));
+                        case ARRAY2D:
+                        {
+                            final long[][] array = (long[][]) field.get(obj);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDLongArray array = (MDLongArray) field.get(obj);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            field.setLong(obj,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            field.set(obj,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, new MDLongArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int[] dimensions, final int len, final int memberTypeId,
+            final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.longToByte(((Number) getMap(obj, memberName))
+                                    .longValue());
+                        case ARRAY1D:
+                            return HDFNativeData.longToByte((long[]) getMap(obj, memberName));
+                        case ARRAY2D:
+                        {
+                            final long[][] array = (long[][]) getMap(obj, memberName);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDLongArray array = (MDLongArray) getMap(obj, memberName);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, new MDLongArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.longToByte(((Number) getList(obj, index))
+                                    .longValue());
+                        case ARRAY1D:
+                            return HDFNativeData.longToByte((long[]) getList(obj, index));
+                        case ARRAY2D:
+                        {
+                            final long[][] array = (long[][]) getList(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDLongArray array = (MDLongArray) getList(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setList(obj, index,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setList(obj, index,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, new MDLongArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, LONG_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 8;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.longToByte(((Number) getArray(obj, index))
+                                    .longValue());
+                        case ARRAY1D:
+                            return HDFNativeData.longToByte((long[]) getArray(obj, index));
+                        case ARRAY2D:
+                        {
+                            final long[][] array = (long[][]) getArray(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDLongArray array = (MDLongArray) getArray(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.longToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final long[] array =
+                                    HDFNativeData.byteToLong(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, new MDLongArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+}
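
The factory above is selected internally whenever a compound member maps to long, long[], long[][] or MDLongArray. A minimal usage sketch of the FIELD access path, assuming the library's public HDF5Factory/IHDF5Writer compound facade (the file name and data set path are illustrative, not from the diff):

    import ch.systemsx.cisd.hdf5.HDF5CompoundType;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class LongCompoundSketch
    {
        // Members are matched by field name (AccessType.FIELD); a default
        // constructor is needed so the reader side can instantiate objects.
        static class Measurement
        {
            long timestamp;               // handled by the SCALAR rank

            long[] counts = new long[4];  // handled by the ARRAY1D rank

            Measurement()
            {
            }
        }

        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("example.h5");
            try
            {
                final HDF5CompoundType<Measurement> type =
                        writer.compound().getInferredType(Measurement.class);
                final Measurement m = new Measurement();
                m.timestamp = System.currentTimeMillis();
                writer.compound().write("/measurement", type, m);
            } finally
            {
                writer.close();
            }
        }
    }
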
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerShortFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerShortFactory.java
new file mode 100644
index 0000000..f9a956d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerShortFactory.java
@@ -0,0 +1,523 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.base.convert.NativeData.SHORT_SIZE;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I16LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U16LE;
+
+import java.lang.reflect.Field;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>short</code>,
+ * <code>short[]</code>, <code>short[][]</code> and <code>MDShortArray</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerShortFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static Map<Class<?>, Rank> classToRankMap = new IdentityHashMap<Class<?>, Rank>();
+
+    private enum Rank
+    {
+        SCALAR(short.class, 0), ARRAY1D(short[].class, 1), ARRAY2D(short[][].class, 2), ARRAYMD(
+                MDShortArray.class, -1);
+
+        private final Class<?> clazz;
+
+        private final int rank;
+
+        Rank(Class<?> clazz, int rank)
+        {
+            this.clazz = clazz;
+            this.rank = rank;
+        }
+
+        int getRank()
+        {
+            return rank;
+        }
+
+        boolean isScalar()
+        {
+            return rank == 0;
+        }
+
+        boolean anyRank()
+        {
+            return rank == -1;
+        }
+
+        Class<?> getClazz()
+        {
+            return clazz;
+        }
+    }
+
+    static
+    {
+        for (Rank r : Rank.values())
+        {
+            classToRankMap.put(r.getClazz(), r);
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        final Rank rankOrNull = classToRankMap.get(clazz);
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataTypeInformation typeInfo = memberInfoOrNull.getType();
+            if (rankOrNull == null || typeInfo.getDataClass() != HDF5DataClass.INTEGER
+                    || typeInfo.getElementSize() != SHORT_SIZE)
+            {
+                return false;
+            }
+            return rankOrNull.anyRank()
+                    || (rankOrNull.getRank() == typeInfo.getDimensions().length)
+                    || (rankOrNull.isScalar() && typeInfo.getDimensions().length == 1 && typeInfo
+                            .getDimensions()[0] == 1);
+
+        } else
+        {
+            return rankOrNull != null;
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileInfoProvider)
+    {
+        final String memberName = member.getMemberName();
+        final Rank rank = classToRankMap.get(memberClazz);
+        final int len =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType()
+                        .getNumberOfElements() : rank.isScalar() ? 1 : member.getMemberTypeLength();
+        final int[] dimensions = rank.isScalar() ? new int[]
+            { 1 } : member.getMemberTypeDimensions();
+        final int storageTypeId = member.getStorageDataTypeId();
+        final int memberTypeId =
+                rank.isScalar() ? (member.isUnsigned() ? H5T_STD_U16LE : H5T_STD_I16LE)
+                        : ((storageTypeId < 0) ? fileInfoProvider.getArrayTypeId(
+                                member.isUnsigned() ? H5T_STD_U16LE : H5T_STD_I16LE, dimensions)
+                                : storageTypeId);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, offset, memOffset,
+                        dimensions, len, memberTypeId, rank, member.tryGetTypeVariant());
+            case MAP:
+                return createByteifyerForMap(memberName, offset, memOffset, dimensions, len,
+                        memberTypeId, rank, member.tryGetTypeVariant());
+            case LIST:
+                return createByteifyerForList(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            case ARRAY:
+                return createByteifyerForArray(memberName, index, offset, memOffset, dimensions,
+                        len, memberTypeId, rank, member.tryGetTypeVariant());
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5MemberByteifyer(field, memberName, SHORT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 2;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.shortToByte(field.getShort(obj));
+                        case ARRAY1D:
+                            return HDFNativeData.shortToByte((short[]) field.get(obj));
+                        case ARRAY2D:
+                        {
+                            final short[][] array = (short[][]) field.get(obj);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDShortArray array = (MDShortArray) field.get(obj);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            field.setShort(obj,
+                                    HDFNativeData.byteToShort(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            field.set(obj, HDFNativeData.byteToShort(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            field.set(obj, new MDShortArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName, final int offset,
+            int memOffset, final int[] dimensions, final int len, final int memberTypeId,
+            final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, SHORT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 2;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.shortToByte(((Number) getMap(obj, memberName))
+                                    .shortValue());
+                        case ARRAY1D:
+                            return HDFNativeData.shortToByte((short[]) getMap(obj, memberName));
+                        case ARRAY2D:
+                        {
+                            final short[][] array = (short[][]) getMap(obj, memberName);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDShortArray array = (MDShortArray) getMap(obj, memberName);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            putMap(obj, memberName,
+                                    HDFNativeData.byteToShort(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            putMap(obj, memberName, HDFNativeData.byteToShort(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            putMap(obj, memberName, new MDShortArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, SHORT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 2;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.shortToByte(((Number) getList(obj, index))
+                                    .shortValue());
+                        case ARRAY1D:
+                            return HDFNativeData.shortToByte((short[]) getList(obj, index));
+                        case ARRAY2D:
+                        {
+                            final short[][] array = (short[][]) getList(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDShortArray array = (MDShortArray) getList(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setList(obj, index,
+                                    HDFNativeData.byteToShort(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setList(obj, index, HDFNativeData.byteToShort(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            setList(obj, index, new MDShortArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName, final int index,
+            final int offset, int memOffset, final int[] dimensions, final int len,
+            final int memberTypeId, final Rank rank, final HDF5DataTypeVariant typeVariant)
+    {
+        return new HDF5MemberByteifyer(null, memberName, SHORT_SIZE * len, offset, memOffset,
+                false, typeVariant)
+            {
+                @Override
+                int getElementSize()
+                {
+                    return 2;
+                }
+
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return memberTypeId;
+                }
+
+                @Override
+                protected int getMemberNativeTypeId()
+                {
+                    return -1;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            return HDFNativeData.shortToByte(((Number) getArray(obj, index))
+                                    .shortValue());
+                        case ARRAY1D:
+                            return HDFNativeData.shortToByte((short[]) getArray(obj, index));
+                        case ARRAY2D:
+                        {
+                            final short[][] array = (short[][]) getArray(obj, index);
+                            MatrixUtils.checkMatrixDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(MatrixUtils.flatten(array));
+                        }
+                        case ARRAYMD:
+                        {
+                            final MDShortArray array = (MDShortArray) getArray(obj, index);
+                            MatrixUtils.checkMDArrayDimensions(memberName, dimensions, array);
+                            return HDFNativeData.shortToByte(array.getAsFlatArray());
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    switch (rank)
+                    {
+                        case SCALAR:
+                            setArray(obj, index,
+                                    HDFNativeData.byteToShort(byteArr, arrayOffset + offsetInMemory));
+                            break;
+                        case ARRAY1D:
+                            setArray(obj, index, HDFNativeData.byteToShort(byteArr, arrayOffset
+                                    + offsetInMemory, len));
+                            break;
+                        case ARRAY2D:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, MatrixUtils.shapen(array, dimensions));
+                            break;
+                        }
+                        case ARRAYMD:
+                        {
+                            final short[] array =
+                                    HDFNativeData
+                                            .byteToShort(byteArr, arrayOffset + offsetInMemory, len);
+                            setArray(obj, index, new MDShortArray(array, dimensions));
+                            break;
+                        }
+                        default:
+                            throw new Error("Unknown rank.");
+                    }
+                }
+            };
+    }
+
+}
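
Both factories above dispatch on the member's Java class through an IdentityHashMap keyed on Class objects. Identity comparison is valid here because Class instances are canonical per class loader, so lookups avoid the cost of equals()/hashCode(). A self-contained sketch of the idiom (class and member names are illustrative):

    import java.util.IdentityHashMap;
    import java.util.Map;

    public class RankDispatchSketch
    {
        enum Rank
        {
            SCALAR, ARRAY1D, ARRAY2D, ARRAYMD
        }

        private static final Map<Class<?>, Rank> CLASS_TO_RANK =
                new IdentityHashMap<Class<?>, Rank>();

        static
        {
            // Class objects are singletons per class loader, so identity
            // comparison is both correct and cheaper than equals().
            CLASS_TO_RANK.put(short.class, Rank.SCALAR);
            CLASS_TO_RANK.put(short[].class, Rank.ARRAY1D);
            CLASS_TO_RANK.put(short[][].class, Rank.ARRAY2D);
        }

        public static void main(String[] args)
        {
            System.out.println(CLASS_TO_RANK.get(short[].class)); // prints ARRAY1D
        }
    }
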
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerStringFactory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerStringFactory.java
new file mode 100644
index 0000000..35f8557
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberByteifyerStringFactory.java
@@ -0,0 +1,415 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getList;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.getMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.putMap;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setArray;
+import static ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.setList;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_REF_OBJ;
+
+import java.lang.reflect.Field;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.AccessType;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+import ch.systemsx.cisd.hdf5.HDF5ValueObjectByteifyer.IFileAccessProvider;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A {@link HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory} for <code>String</code>.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundMemberByteifyerStringFactory implements IHDF5CompoundMemberBytifyerFactory
+{
+
+    private static abstract class HDF5StringMemberByteifyer extends HDF5MemberByteifyer
+    {
+        HDF5StringMemberByteifyer(Field fieldOrNull, String memberName, int size, int offset,
+                int memOffset, CharacterEncoding encoding, int maxCharacters,
+                boolean isVariableLengthType, boolean isReferenceType)
+        {
+            super(fieldOrNull, memberName, size, offset, memOffset, encoding, maxCharacters,
+                    isVariableLengthType, isReferenceType);
+        }
+
+        /**
+         * For strings, this is the <i>minimal</i> element size: 1 for fixed-length strings, or
+         * the size of a pointer for variable-length strings.
+         */
+        @Override
+        int getElementSize()
+        {
+            return isVariableLengthType() ? HDFNativeData.getMachineWordSize() : 1;
+        }
+
+        @Override
+        public boolean mayBeCut()
+        {
+            return true;
+        }
+
+        @Override
+        protected int getMemberNativeTypeId()
+        {
+            return -1;
+        }
+    }
+
+    @Override
+    public boolean canHandle(Class<?> clazz, HDF5CompoundMemberInformation memberInfoOrNull)
+    {
+        if (memberInfoOrNull != null)
+        {
+            final HDF5DataClass dataClass = memberInfoOrNull.getType().getDataClass();
+            return ((clazz == String.class) || (clazz == char[].class))
+                    && (dataClass == HDF5DataClass.STRING || dataClass == HDF5DataClass.REFERENCE);
+        } else
+        {
+            return (clazz == String.class) || (clazz == char[].class);
+        }
+    }
+
+    @Override
+    public Class<?> tryGetOverrideJavaType(HDF5DataClass dataClass, int rank, int elementSize,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return null;
+    }
+
+    @Override
+    public HDF5MemberByteifyer createBytifyer(AccessType accessType, Field fieldOrNull,
+            HDF5CompoundMemberMapping member,
+            HDF5CompoundMemberInformation compoundMemberInfoOrNull,
+            HDF5EnumerationType enumTypeOrNull, Class<?> memberClazz, int index, int offset,
+            int memOffset, IFileAccessProvider fileAccessProvider)
+    {
+        final String memberName = member.getMemberName();
+        final int maxCharacters = member.getMemberTypeLength();
+        final boolean isVariableLengthType =
+                member.isVariableLength()
+                        || (maxCharacters == 0 && member.tryGetHints() != null && member
+                                .tryGetHints().isUseVariableLengthStrings());
+        final boolean isReferenceType = member.isReference();
+        // May be -1 if not known
+        final int memberTypeId =
+                isVariableLengthType ? fileAccessProvider.getVariableLengthStringDataTypeId()
+                        : member.getStorageDataTypeId();
+        final CharacterEncoding encoding =
+                isReferenceType ? CharacterEncoding.ASCII : fileAccessProvider
+                        .getCharacterEncoding(memberTypeId);
+        final int size =
+                (compoundMemberInfoOrNull != null) ? compoundMemberInfoOrNull.getType().getSize()
+                        : encoding.getMaxBytesPerChar() * maxCharacters;
+        final int stringOrRefDataTypeId =
+                (memberTypeId < 0) ? (isReferenceType ? H5T_STD_REF_OBJ : fileAccessProvider
+                        .getStringDataTypeId(size)) : memberTypeId;
+        final boolean isCharArray = (memberClazz == char[].class);
+        switch (accessType)
+        {
+            case FIELD:
+                return createByteifyerForField(fieldOrNull, memberName, fileAccessProvider, offset,
+                        memOffset, stringOrRefDataTypeId, maxCharacters, size, encoding,
+                        isCharArray, isVariableLengthType, isReferenceType);
+            case MAP:
+                return createByteifyerForMap(memberName, fileAccessProvider, offset, memOffset,
+                        stringOrRefDataTypeId, maxCharacters, size, encoding, isCharArray,
+                        isVariableLengthType, isReferenceType);
+            case LIST:
+                return createByteifyerForList(memberName, fileAccessProvider, index, offset,
+                        memOffset, stringOrRefDataTypeId, maxCharacters, size, encoding,
+                        isCharArray, isVariableLengthType, isReferenceType);
+            case ARRAY:
+                return createByteifyerForArray(memberName, fileAccessProvider, index, offset,
+                        memOffset, stringOrRefDataTypeId, maxCharacters, size, encoding,
+                        isCharArray, isVariableLengthType, isReferenceType);
+            default:
+                throw new Error("Unknown access type");
+        }
+    }
+
+    private static String refToStr(byte[] byteArr, int offset)
+    {
+        final long reference = NativeData.byteToLong(byteArr, ByteOrder.NATIVE, offset, 1)[0];
+        return '\0' + Long.toString(reference);
+    }
+
+    static String bytesToString(byte[] byteArr, final int totalOffset, final int maxIdx,
+            CharacterEncoding encoding, final boolean isVariableLengthType,
+            final boolean isReferenceType)
+    {
+        final String s;
+        if (isVariableLengthType)
+        {
+            s = HDFNativeData.createVLStrFromCompound(byteArr, totalOffset);
+        } else if (isReferenceType)
+        {
+            s = refToStr(byteArr, totalOffset);
+        } else
+        {
+            s = StringUtils.fromBytes0Term(byteArr, totalOffset, maxIdx, encoding);
+        }
+        return s;
+    }
+
+    private HDF5MemberByteifyer createByteifyerForField(final Field field, final String memberName,
+            final IFileAccessProvider fileAccessProvider, final int offset, int memOffset,
+            final int stringOrRefDataTypeId, final int maxCharacters, final int size,
+            final CharacterEncoding encoding, final boolean isCharArray,
+            final boolean isVariableLengthType, final boolean isReferenceType)
+    {
+        ReflectionUtils.ensureAccessible(field);
+        return new HDF5StringMemberByteifyer(field, memberName, size, offset, memOffset, encoding,
+                maxCharacters, isVariableLengthType, isReferenceType)
+            {
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return stringOrRefDataTypeId;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    Object o = field.get(obj);
+                    if (o == null)
+                    {
+                        throw new NullPointerException("Field '" + field.getName() + "' is null");
+                    }
+                    final String s = isCharArray ? new String((char[]) o) : o.toString();
+                    if (isVariableLengthType)
+                    {
+                        final byte[] result = new byte[HDFNativeData.getMachineWordSize()];
+                        HDFNativeData.compoundCpyVLStr(s, result, 0);
+                        return result;
+                    } else if (isReferenceType)
+                    {
+                        return fileAccessProvider.createObjectReference(s);
+                    } else
+                    {
+                        return StringUtils.toBytes0Term(s, getMaxCharacters(), encoding);
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final int totalOffset = arrayOffset + offsetInMemory;
+                    final int maxIdx = totalOffset + maxCharacters;
+                    final String s =
+                            bytesToString(byteArr, totalOffset, maxIdx, encoding,
+                                    isVariableLengthType, isReferenceType);
+                    field.set(obj, isCharArray ? s.toCharArray() : s);
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForMap(final String memberName,
+            final IFileAccessProvider fileAccessProvider, final int offset, int memOffset,
+            final int stringOrRefDataTypeId, final int maxCharacters, final int size,
+            final CharacterEncoding encoding, final boolean isCharArray,
+            final boolean isVariableLengthType, final boolean isReferenceType)
+    {
+        return new HDF5StringMemberByteifyer(null, memberName, size, offset, memOffset, encoding,
+                maxCharacters, isVariableLengthType, isReferenceType)
+            {
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return stringOrRefDataTypeId;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object o = getMap(obj, memberName);
+                    final String s;
+                    if (o.getClass() == char[].class)
+                    {
+                        s = new String((char[]) o);
+                    } else
+                    {
+                        s = o.toString();
+                    }
+                    if (isVariableLengthType)
+                    {
+                        final byte[] result = new byte[HDFNativeData.getMachineWordSize()];
+                        HDFNativeData.compoundCpyVLStr(s, result, 0);
+                        return result;
+                    } else if (isReferenceType)
+                    {
+                        return fileAccessProvider.createObjectReference(s);
+                    } else
+                    {
+                        return StringUtils.toBytes0Term(s, getMaxCharacters(), encoding);
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final int totalOffset = arrayOffset + offsetInMemory;
+                    final int maxIdx = totalOffset + maxCharacters;
+                    final String s =
+                            bytesToString(byteArr, totalOffset, maxIdx, encoding,
+                                    isVariableLengthType, isReferenceType);
+                    if (isCharArray)
+                    {
+                        putMap(obj, memberName, s.toCharArray());
+                    } else
+                    {
+                        putMap(obj, memberName, s);
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForList(final String memberName,
+            final IFileAccessProvider fileAccessProvider, final int index, final int offset,
+            int memOffset, final int stringOrRefDataTypeId, final int maxCharacters,
+            final int size, final CharacterEncoding encoding, final boolean isCharArray,
+            final boolean isVariableLengthType, final boolean isReferenceType)
+    {
+        return new HDF5StringMemberByteifyer(null, memberName, size, offset, memOffset, encoding,
+                maxCharacters, isVariableLengthType, isReferenceType)
+            {
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return stringOrRefDataTypeId;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object o = getList(obj, index);
+                    final String s;
+                    if (o.getClass() == char[].class)
+                    {
+                        s = new String((char[]) o);
+                    } else
+                    {
+                        s = o.toString();
+                    }
+                    if (isVariableLengthType)
+                    {
+                        final byte[] result = new byte[HDFNativeData.getMachineWordSize()];
+                        HDFNativeData.compoundCpyVLStr(s, result, 0);
+                        return result;
+                    } else if (isReferenceType)
+                    {
+                        return fileAccessProvider.createObjectReference(s);
+                    } else
+                    {
+                        return StringUtils.toBytes0Term(s, getMaxCharacters(), encoding);
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final int totalOffset = arrayOffset + offsetInMemory;
+                    final int maxIdx = totalOffset + maxCharacters;
+                    final String s =
+                            bytesToString(byteArr, totalOffset, maxIdx, encoding,
+                                    isVariableLengthType, isReferenceType);
+                    if (isCharArray)
+                    {
+                        setList(obj, index, s.toCharArray());
+                    } else
+                    {
+                        setList(obj, index, s);
+                    }
+                }
+            };
+    }
+
+    private HDF5MemberByteifyer createByteifyerForArray(final String memberName,
+            final IFileAccessProvider fileAccessProvider, final int index, final int offset,
+            int memOffset, final int stringOrRefDataTypeId, final int maxCharacters,
+            final int size, final CharacterEncoding encoding, final boolean isCharArray,
+            final boolean isVariableLengthType, final boolean isReferenceType)
+    {
+        return new HDF5StringMemberByteifyer(null, memberName, size, offset, memOffset, encoding,
+                maxCharacters, isVariableLengthType, isReferenceType)
+            {
+                @Override
+                protected int getMemberStorageTypeId()
+                {
+                    return stringOrRefDataTypeId;
+                }
+
+                @Override
+                public byte[] byteify(int compoundDataTypeId, Object obj)
+                        throws IllegalAccessException
+                {
+                    final Object o = getArray(obj, index);
+                    final String s;
+                    if (o.getClass() == char[].class)
+                    {
+                        s = new String((char[]) o);
+                    } else
+                    {
+                        s = o.toString();
+                    }
+                    if (isVariableLengthType)
+                    {
+                        final byte[] result = new byte[HDFNativeData.getMachineWordSize()];
+                        HDFNativeData.compoundCpyVLStr(s, result, 0);
+                        return result;
+                    } else if (isReferenceType)
+                    {
+                        return fileAccessProvider.createObjectReference(s);
+                    } else
+                    {
+                        return StringUtils.toBytes0Term(s, getMaxCharacters(), encoding);
+                    }
+                }
+
+                @Override
+                public void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+                        int arrayOffset) throws IllegalAccessException
+                {
+                    final int totalOffset = arrayOffset + offsetInMemory;
+                    final int maxIdx = totalOffset + maxCharacters;
+                    final String s =
+                            bytesToString(byteArr, totalOffset, maxIdx, encoding,
+                                    isVariableLengthType, isReferenceType);
+                    if (isCharArray)
+                    {
+                        setArray(obj, index, s.toCharArray());
+                    } else
+                    {
+                        setArray(obj, index, s);
+                    }
+                }
+            };
+    }
+
+}
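
The factory above covers three on-disk string representations (fixed-length strings, variable-length strings and object references), chosen by the member mapping. A hedged sketch of selecting them explicitly: mapping() and length() are taken from the library's public fluent API, while the variableLength() call and the exact getType() overload shown here are assumptions:

    import ch.systemsx.cisd.hdf5.HDF5CompoundMemberMapping;
    import ch.systemsx.cisd.hdf5.HDF5CompoundType;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class StringMemberSketch
    {
        static class Record
        {
            String name;     // fixed-length string of 32 characters

            String comment;  // variable-length string (assumed fluent call below)

            Record()
            {
            }
        }

        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("strings.h5");
            try
            {
                final HDF5CompoundType<Record> type = writer.compound().getType(
                        Record.class,
                        HDF5CompoundMemberMapping.mapping("name").length(32),
                        HDF5CompoundMemberMapping.mapping("comment").variableLength());
                final Record r = new Record();
                r.name = "probe-1";
                r.comment = "arbitrarily long text";
                writer.compound().write("/record", type, r);
            } finally
            {
                writer.close();
            }
        }
    }
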
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberInformation.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberInformation.java
new file mode 100644
index 0000000..538434d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberInformation.java
@@ -0,0 +1,301 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.BitSet;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+
+/**
+ * Contains information about one member of an HDF5 compound data type.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5CompoundMemberInformation implements
+        Comparable<HDF5CompoundMemberInformation>
+{
+    private final String memberName;
+
+    private final HDF5DataTypeInformation dataTypeInformation;
+
+    private final int offsetOnDisk;
+
+    private final int offsetInMemory;
+
+    private final String[] enumValuesOrNull;
+
+    HDF5CompoundMemberInformation(String memberName, HDF5DataTypeInformation dataTypeInformation,
+            int offsetOnDisk, int offsetInMemory, String[] enumValuesOrNull)
+    {
+        assert memberName != null;
+        assert dataTypeInformation != null;
+        assert offsetOnDisk >= 0;
+
+        this.memberName = memberName;
+        this.dataTypeInformation = dataTypeInformation;
+        this.enumValuesOrNull = enumValuesOrNull;
+        this.offsetOnDisk = offsetOnDisk;
+        this.offsetInMemory =
+                PaddingUtils.padOffset(offsetInMemory,
+                        dataTypeInformation.getElementSizeForPadding());
+    }
+
+    HDF5CompoundMemberInformation(String memberName, HDF5DataTypeInformation dataTypeInformation,
+            int offsetOnDisk, int offsetInMemory)
+    {
+        this(memberName, dataTypeInformation, offsetOnDisk, offsetInMemory, null);
+    }
+
+    /**
+     * Returns the name of the member.
+     */
+    public String getName()
+    {
+        return memberName;
+    }
+
+    /**
+     * Returns the type information of the member.
+     */
+    public HDF5DataTypeInformation getType()
+    {
+        return dataTypeInformation;
+    }
+
+    /**
+     * Returns the values of the enumeration type of this compound member, if it is of an
+     * enumeration type and <code>null</code> otherwise.
+     */
+    public String[] tryGetEnumValues()
+    {
+        return enumValuesOrNull;
+    }
+
+    /**
+     * Returns the byte offset of this member within the compound data type, 0 meaning that the
+     * member is the first one in the compound data type.
+     * 
+     * @deprecated Use {@link #getOffsetOnDisk()} instead.
+     */
+    @Deprecated
+    public int getOffset()
+    {
+        return offsetOnDisk;
+    }
+
+    /**
+     * Returns the byte offset of this member within the compound data type on disk, 0 meaning
+     * that the member is the first one in the compound data type.
+     * <p>
+     * The on-disk representation is packed.
+     */
+    public int getOffsetOnDisk()
+    {
+        return offsetOnDisk;
+    }
+
+    /**
+     * Returns the byte offset of this member within the compound data type in memory. 0 meaning
+     * that the member is the first one in the compound data type.
+     * <p>
+     * The in-memory representation may contain padding to ensure that read access is always
+     * aligned.
+     */
+    public int getOffsetInMemory()
+    {
+        return offsetInMemory;
+    }
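+
+    // Illustrative example (assuming PaddingUtils.padOffset() rounds an offset up to the next
+    // multiple of the given element size): for a compound of a byte followed by an int, the
+    // packed on-disk offsets are 0 and 1, while the padded in-memory offsets are 0 and 4.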
+
+    /**
+     * Creates the compound member information for the given <var>compoundClass</var> and
+     * <var>members</var>. The returned array will contain the members in the order of the
+     * <var>members</var>.
+     * <p>
+     * Call <code>Arrays.sort(compoundInformation)</code> to sort the array in alphabetical order of
+     * names.
+     * <p>
+     * Can be used to compare compound types, e.g. via
+     * {@link java.util.Arrays#equals(Object[], Object[])}.
+     */
+    public static HDF5CompoundMemberInformation[] create(Class<?> compoundClass,
+            String houseKeepingNameSuffix, final HDF5CompoundMemberMapping... members)
+    {
+        assert compoundClass != null;
+        final HDF5CompoundMemberInformation[] info =
+                new HDF5CompoundMemberInformation[members.length];
+        int offsetOnDisk = 0;
+        int offsetInMemory = 0;
+        for (int i = 0; i < info.length; ++i)
+        {
+            info[i] =
+                    new HDF5CompoundMemberInformation(members[i].getMemberName(),
+                            getTypeInformation(compoundClass, houseKeepingNameSuffix, members[i]),
+                            offsetOnDisk, offsetInMemory);
+            final int elementSize = info[i].getType().getElementSize();
+            final int size = info[i].getType().getSize();
+            offsetOnDisk += size;
+            offsetInMemory = PaddingUtils.padOffset(offsetInMemory + size, elementSize);
+        }
+        Arrays.sort(info);
+        return info;
+    }
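+
+    // Usage sketch (RecordA, RecordB and the mapping arrays are illustrative): since create()
+    // returns the member information sorted, two compound types can be compared member-wise.
+    //
+    //   final HDF5CompoundMemberInformation[] a =
+    //           HDF5CompoundMemberInformation.create(RecordA.class, "", mappingsA);
+    //   final HDF5CompoundMemberInformation[] b =
+    //           HDF5CompoundMemberInformation.create(RecordB.class, "", mappingsB);
+    //   final boolean sameLayout = java.util.Arrays.equals(a, b);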
+
+    private static HDF5DataTypeInformation getTypeInformation(Class<?> compoundClass,
+            String houseKeepingNameSuffix, final HDF5CompoundMemberMapping member)
+    {
+        final Field fieldOrNull = member.tryGetField(compoundClass);
+        final Class<?> fieldTypeOrNull = (fieldOrNull == null) ? null : fieldOrNull.getType();
+        final HDF5DataTypeInformation typeInfo;
+        if (fieldTypeOrNull == boolean.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.BOOLEAN, houseKeepingNameSuffix, 1,
+                            false);
+        } else if (fieldTypeOrNull == byte.class || fieldTypeOrNull == byte[].class
+                || fieldTypeOrNull == byte[][].class || fieldTypeOrNull == MDByteArray.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.INTEGER, houseKeepingNameSuffix, 1,
+                            false == member.isUnsigned());
+        } else if (fieldTypeOrNull == short.class || fieldTypeOrNull == short[].class
+                || fieldTypeOrNull == short[][].class || fieldTypeOrNull == MDShortArray.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.INTEGER, houseKeepingNameSuffix, 2,
+                            false == member.isUnsigned());
+        } else if (fieldTypeOrNull == int.class || fieldTypeOrNull == int[].class
+                || fieldTypeOrNull == int[][].class || fieldTypeOrNull == MDIntArray.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.INTEGER, houseKeepingNameSuffix, 4,
+                            false == member.isUnsigned());
+        } else if (fieldTypeOrNull == long.class || fieldTypeOrNull == long[].class
+                || fieldTypeOrNull == long[][].class || fieldTypeOrNull == MDLongArray.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.INTEGER, houseKeepingNameSuffix, 8,
+                            false == member.isUnsigned());
+        } else if (fieldTypeOrNull == BitSet.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.BITFIELD, houseKeepingNameSuffix, 8,
+                            member.getMemberTypeLength() / 64
+                                    + (member.getMemberTypeLength() % 64 != 0 ? 1 : 0), false);
+        } else if (fieldTypeOrNull == float.class || fieldTypeOrNull == float[].class
+                || fieldTypeOrNull == float[][].class || fieldTypeOrNull == MDFloatArray.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.FLOAT, houseKeepingNameSuffix, 4,
+                            true);
+        } else if (fieldTypeOrNull == double.class || fieldTypeOrNull == double[].class
+                || fieldTypeOrNull == double[][].class || fieldTypeOrNull == MDDoubleArray.class)
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.FLOAT, houseKeepingNameSuffix, 8,
+                            true);
+        } else if (fieldTypeOrNull == String.class || fieldTypeOrNull == char[].class)
+        {
+            if (member.isReference())
+            {
+                typeInfo =
+                        new HDF5DataTypeInformation(HDF5DataClass.REFERENCE, houseKeepingNameSuffix,
+                                HDF5BaseReader.REFERENCE_SIZE_IN_BYTES, false);
+            } else
+            {
+                typeInfo =
+                        new HDF5DataTypeInformation(HDF5DataClass.STRING, houseKeepingNameSuffix,
+                                member.getMemberTypeLength(), false);
+            }
+        } else if (fieldTypeOrNull == HDF5EnumerationValue.class)
+        {
+            final DataTypeInfoOptions options =
+                    new DataTypeInfoOptions("UNKNOWN".equals(member.tryGetEnumerationType()
+                            .getName()) == false, member.tryGetTypeVariant() != null);
+            typeInfo =
+                    new HDF5DataTypeInformation(
+                            options.knowsDataTypePath() ? HDF5Utils.createDataTypePath(
+                                    HDF5Utils.ENUM_PREFIX, houseKeepingNameSuffix, member
+                                            .tryGetEnumerationType().getName()) : null, options,
+                            HDF5DataClass.ENUM, houseKeepingNameSuffix, member
+                                    .tryGetEnumerationType().getStorageForm().getStorageSize(),
+                            false);
+            if (options.knowsDataTypeVariant())
+            {
+                typeInfo.setTypeVariant(member.tryGetTypeVariant());
+            }
+        } else
+        {
+            typeInfo =
+                    new HDF5DataTypeInformation(HDF5DataClass.OTHER, houseKeepingNameSuffix, -1,
+                            false);
+        }
+        if (fieldTypeOrNull != null
+                && ((fieldTypeOrNull.isArray() && fieldTypeOrNull != char[].class)
+                        || MDAbstractArray.class.isAssignableFrom(fieldTypeOrNull)))
+        {
+            typeInfo.setDimensions(member.getMemberTypeDimensions());
+        }
+        return typeInfo;
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (obj == null || obj instanceof HDF5CompoundMemberInformation == false)
+        {
+            return false;
+        }
+        final HDF5CompoundMemberInformation that = (HDF5CompoundMemberInformation) obj;
+        return memberName.equals(that.memberName)
+                && dataTypeInformation.equals(that.dataTypeInformation);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        return (17 * 59 + memberName.hashCode()) * 59 + dataTypeInformation.hashCode();
+    }
+
+    @Override
+    public String toString()
+    {
+        return memberName + ":" + dataTypeInformation.toString();
+    }
+
+    //
+    // Comparable<HDF5CompoundMemberInformation>
+    //
+
+    @Override
+    public int compareTo(HDF5CompoundMemberInformation o)
+    {
+        return memberName.compareTo(o.memberName);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberMapping.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberMapping.java
new file mode 100644
index 0000000..2bcd5c5
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundMemberMapping.java
@@ -0,0 +1,1495 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import org.apache.commons.lang.StringUtils;
+
+import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+
+/**
+ * A class that maps a Java field to a member of an HDF5 compound data type.
+ * <p>
+ * Example on how to use:
+ * 
+ * <pre>
+ * static class Record
+ *     {
+ *         int i;
+ * 
+ *         String s;
+ * 
+ *         HDF5EnumerationValue e;
+ * 
+ *         Record(int i, String s, HDF5EnumerationValue e)
+ *         {
+ *             this.i = i;
+ *             this.e = e;
+ *             this.s = s;
+ *         }
+ * 
+ *         Record()
+ *         {
+ *         }
+ * 
+ *         static HDF5CompoundType<Record> getHDF5Type(HDF5Reader reader)
+ *         {
+ *             final HDF5EnumerationType enumType = reader.getEnumType("someEnumType", new String[]
+ *                 { "1", "Two", "THREE" });
+ *             return reader.getCompoundType(Record.class, mapping("i"), 
+ *                      mapping("s", 20), mapping("e", enumType));
+ *         }
+ * 
+ *     }
+ *         
+ *     ...
+ *         
+ *     final HDF5Writer writer = new HDF5Writer(new File("test.h5")).open();
+ *     final HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+ *     final HDF5EnumerationType enumType = writer.getEnumType("someEnumType");
+ *     Record[] array =
+ *             new Record[]
+ *                 {
+ *                         new Record(1, "some text",
+ *                                 new HDF5EnumerationValue(enumType, "THREE")),
+ *                         new Record(2, "some note",
+ *                                 new HDF5EnumerationValue(enumType, "1")), };
+ *     writer.writeCompoundArray("/testCompound", compoundType, array);
+ *     writer.close();
+ * </pre>
+ * 
+ * A simpler form is to let JHDF5 infer the mapping between fields in the Java object and members of
+ * the compound data type, see {@link #inferMapping(Class)} and {@link #inferMapping(Class, Map)}.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5CompoundMemberMapping
+{
+    private final String memberName;
+
+    private final int storageDataTypeId;
+
+    private final Field fieldOrNull;
+
+    private String fieldName;
+
+    private Class<?> memberClassOrNull;
+
+    private String enumTypeNameOrNull;
+
+    private int memberTypeLength;
+
+    private boolean unsigned;
+
+    private boolean variableLength;
+
+    private boolean reference;
+
+    private int[] memberTypeDimensions;
+
+    private HDF5EnumerationType enumTypeOrNull;
+
+    private HDF5DataTypeVariant typeVariantOrNull;
+
+    private HDF5CompoundMappingHints hintsOrNull;
+
+    /**
+     * Adds a member mapping for <var>memberName</var>.
+     * 
+     * @param memberName The name of the member in the compound type. Will also be used to find
+     *            the name of the field in the Java class if not overridden by
+     *            {@link #fieldName(String)}.
+     */
+    public static HDF5CompoundMemberMapping mapping(String memberName)
+    {
+        return new HDF5CompoundMemberMapping(memberName);
+    }
+
+    /**
+     * Adds a member mapping for <var>fieldName</var>.
+     * 
+     * @param fieldName The name of the field in the Java class.
+     * @param memberName The name of the member in the compound type.
+     * @param memberClass The class of the member. Only used if the compound pojo class is a map.
+     *            For restrictions on the type, see above.
+     * @param memberDimensions The dimensions of the compound type (i.e. length of the String or
+     *            dimensions of the array).
+     * @param storageDataTypeId The storage data type id of the member, if known, or -1 else
+     * @param unsigned If <code>true</code>, map to an unsigned integer type.
+     * @param variableLength if <code>true</code>, map to a variable-length string type.
+     * @param reference if <code>true</code>, map to a reference type.
+     * @param typeVariantOrNull The data type variant of this mapping or <code>null</code>
+     */
+    static HDF5CompoundMemberMapping mappingArrayWithStorageId(String fieldName, String memberName,
+            Class<?> memberClass, int[] memberDimensions, int storageDataTypeId, boolean unsigned,
+            boolean variableLength, boolean reference, HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return new HDF5CompoundMemberMapping(fieldName, null, memberClass, memberName, null, null,
+                memberDimensions, storageDataTypeId, unsigned, variableLength, reference,
+                typeVariantOrNull);
+    }
+
+    /**
+     * Adds a member mapping for <var>fieldName</var>. Only suitable for Enumeration arrays.
+     * 
+     * @param fieldName The name of the field in the Java class.
+     * @param memberName The name of the member in the compound type.
+     * @param enumType The enumeration type in the HDF5 file.
+     * @param memberTypeDimensions The dimensions of the array in the compound type.
+     * @param storageTypeId the id of the storage type of this member.
+     * @param typeVariantOrNull The data type variant of this mapping or <code>null</code>
+     */
+    static HDF5CompoundMemberMapping mappingWithStorageTypeId(String fieldName, String memberName,
+            HDF5EnumerationType enumType, int[] memberTypeDimensions, int storageTypeId,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        assert enumType != null;
+        return new HDF5CompoundMemberMapping(fieldName, null, HDF5EnumerationValueArray.class,
+                memberName, enumType, null, memberTypeDimensions, storageTypeId, false, false,
+                false, typeVariantOrNull);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>pojoClass</var>. This method
+     * honors the annotations {@link CompoundType} and {@link CompoundElement}.
+     * <p>
+     * <em>Note 1:</em> All fields that correspond to members with a variable length (e.g. Strings,
+     * primitive arrays and matrices and objects of type <code>MDXXXArray</code>) need to be
+     * annotated with {@link CompoundElement} specifying their dimensions using
+     * {@link CompoundElement#dimensions()}.
+     * <p>
+     * <em>Note 2:</em> <var>pojoClass</var> containing HDF5 enumerations cannot have their mapping
+     * inferred as the HDF5 enumeration type needs to be explicitly specified in the mapping.
+     * <p>
+     * <em>Example 1:</em>
+     * 
+     * <pre>
+     * class Record1
+     * {
+     *     @CompoundElement(dimension = 10)
+     *     String s;
+     * 
+     *     float f;
+     * }
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(Record1.class) -> { mapping("s", 10), mapping("f") }
+     * </pre>
+     * 
+     * <em>Example 2:</em>
+     * 
+     * <pre>
+     * @CompoundType(mapAllFields = false)
+     * class Record2
+     * {
+     *     @CompoundElement(memberName = "someString", dimension = 10)
+     *     String s;
+     * 
+     *     float f;
+     * }
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(Record2.class) -> { mapping("s", "someString", 10) }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Class<?> pojoClass)
+    {
+        return inferMapping(pojoClass, (HDF5CompoundMappingHints) null);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>pojoClass</var>. This method
+     * honors the annotations {@link CompoundType} and {@link CompoundElement}.
+     * <p>
+     * <em>Note 1:</em> All fields that correspond to members with a dimension (e.g. Strings,
+     * primitive arrays and matrices and objects of type <code>MDXXXArray</code>) need to be
+     * annotated with {@link CompoundElement} specifying their dimensions using
+     * {@link CompoundElement#dimensions()}. Strings can alternatively be annotated with
+     * <code>CompoundElement.variableLength = true</code>.
+     * <p>
+     * <em>Note 2:</em> <var>pojoClass</var> containing HDF5 enumerations need to have their
+     * {@link HDF5EnumerationType} specified in the <var>fieldNameToEnumTypeMapOrNull</var>. You may
+     * use {@link #inferEnumerationTypeMap(Object, IHDF5EnumTypeRetriever)} to create
+     * <var>fieldNameToEnumTypeMapOrNull</var>.
+     * <p>
+     * <em>Example 1:</em>
+     * 
+     * <pre>
+     * class Record1
+     * {
+     *     @CompoundElement(dimension = 10)
+     *     String s;
+     * 
+     *     float f;
+     * }
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(Record1.class) -> { mapping("s", 10), mapping("f") }
+     * </pre>
+     * 
+     * <em>Example 2:</em>
+     * 
+     * <pre>
+     * @CompoundType(mapAllFields = false)
+     * class Record2
+     * {
+     *     @CompoundElement(memberName = "someString", dimension = 10)
+     *     String s;
+     * 
+     *     float f;
+     * }
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(Record2.class) -> { mapping("s", "someString", 10) }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Class<?> pojoClass,
+            final Map<String, HDF5EnumerationType> fieldNameToEnumTypeMapOrNull)
+    {
+        return inferMapping(pojoClass,
+                new HDF5CompoundMappingHints().enumTypeMapping(fieldNameToEnumTypeMapOrNull));
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>pojoClass</var>. This method
+     * honors the annotations {@link CompoundType} and {@link CompoundElement}.
+     * <p>
+     * <em>Note 1:</em> All fields that correspond to members with a dimension (e.g. Strings,
+     * primitive arrays and matrices and objects of type <code>MDXXXArray</code>) need to be
+     * annotated with {@link CompoundElement} specifying their dimensions using
+     * {@link CompoundElement#dimensions()}. Strings can alternatively be annotated with
+     * <code>CompoundElement.variableLength = true</code>.
+     * <p>
+     * <em>Note 2:</em> <var>pojoClass</var> containing HDF5 enumerations need to have their
+     * {@link HDF5EnumerationType} specified in the <var>fieldNameToEnumTypeMapOrNull</var>. You may
+     * use {@link #inferEnumerationTypeMap(Object, IHDF5EnumTypeRetriever)} to create
+     * <var>fieldNameToEnumTypeMapOrNull</var>.
+     * <p>
+     * <em>Example 1:</em>
+     * 
+     * <pre>
+     * class Record1
+     * {
+     *     @CompoundElement(dimension = 10)
+     *     String s;
+     * 
+     *     float f;
+     * }
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(Record1.class) -> { mapping("s", 10), mapping("f") }
+     * </pre>
+     * 
+     * <em>Example 2:</em>
+     * 
+     * <pre>
+     * @CompoundType(mapAllFields = false)
+     * class Record2
+     * {
+     *     @CompoundElement(memberName = "someString", dimension = 10)
+     *     String s;
+     * 
+     *     float f;
+     * }
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(Record2.class) -> { mapping("s", "someString", 10) }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Class<?> pojoClass,
+            final HDF5CompoundMappingHints hintsOrNull)
+    {
+        final List<HDF5CompoundMemberMapping> result =
+                new ArrayList<HDF5CompoundMemberMapping>(pojoClass.getDeclaredFields().length);
+        final CompoundType ct = pojoClass.getAnnotation(CompoundType.class);
+        final boolean includeAllFields = (ct != null) ? ct.mapAllFields() : true;
+        for (Class<?> c = pojoClass; c != null; c = c.getSuperclass())
+        {
+            for (Field f : c.getDeclaredFields())
+            {
+                final HDF5EnumerationType enumTypeOrNull =
+                        (hintsOrNull != null) ? hintsOrNull.tryGetEnumType(f.getName()) : null;
+                final CompoundElement e = f.getAnnotation(CompoundElement.class);
+                if (e != null)
+                {
+                    result.add(new HDF5CompoundMemberMapping(f.getName(), f, f.getType(),
+                            StringUtils.defaultIfEmpty(e.memberName(), f.getName()),
+                            enumTypeOrNull, e.typeName(), e.dimensions(), e.unsigned(), e
+                                    .variableLength(), e.reference(), HDF5DataTypeVariant
+                                    .unmaskNone(e.typeVariant())));
+                } else if (includeAllFields)
+                {
+                    final boolean variableLength =
+                            (hintsOrNull == null) ? false : hintsOrNull
+                                    .isUseVariableLengthStrings();
+                    result.add(new HDF5CompoundMemberMapping(f.getName(), f, f.getType(), f
+                            .getName(), enumTypeOrNull, null, new int[0], false, variableLength,
+                            false, null));
+                }
+            }
+        }
+        return result.toArray(new HDF5CompoundMemberMapping[result.size()]);
+    }
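+
+    // Usage sketch (Record1 and enumTypeMap are illustrative): enum types can be supplied
+    // through hints when the mapped class contains HDF5 enumerations.
+    //
+    //   final HDF5CompoundMemberMapping[] m = HDF5CompoundMemberMapping.inferMapping(
+    //           Record1.class, new HDF5CompoundMappingHints().enumTypeMapping(enumTypeMap));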
+
+    /**
+     * @see #inferMapping(Class, Map) <p>
+     *      This method uses <var>pojo</var> to infer length and dimension information.
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Object pojo,
+            final Map<String, HDF5EnumerationType> fieldNameToEnumTypeMapOrNull)
+    {
+        return inferMapping(pojo, fieldNameToEnumTypeMapOrNull, false);
+    }
+
+    /**
+     * This method uses <var>pojo</var> to infer length and dimension information.
+     * 
+     * @param pojo The pojo to infer member names, length and dimension information from.
+     * @param fieldNameToEnumTypeMapOrNull The map to get member name to enumeration type mapping
+     *            from.
+     * @param useVariableLengthStringTypes If <code>true</code>, use variable-length string types
+     *            for all strings in the <var>pojo</var> template.
+     * @see #inferMapping(Class, Map) <p>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Object pojo,
+            final Map<String, HDF5EnumerationType> fieldNameToEnumTypeMapOrNull,
+            final boolean useVariableLengthStringTypes)
+    {
+        final HDF5CompoundMemberMapping[] result =
+                inferMapping(pojo.getClass(), fieldNameToEnumTypeMapOrNull);
+        for (HDF5CompoundMemberMapping m : result)
+        {
+            try
+            {
+                final Class<?> memberClass = m.tryGetMemberClass();
+                if (m.getMemberTypeDimensions().length == 0)
+                {
+                    if (memberClass == String.class)
+                    {
+                        ReflectionUtils.ensureAccessible(m.fieldOrNull);
+                        if (useVariableLengthStringTypes)
+                        {
+                            m.variableLength(true);
+                        } else
+                        {
+                            final String value = (String) (m.fieldOrNull.get(pojo));
+                            m.length(value != null ? value.length() : 0);
+                        }
+                    } else if (memberClass.isArray())
+                    {
+                        ReflectionUtils.ensureAccessible(m.fieldOrNull);
+                        final Object o = m.fieldOrNull.get(pojo);
+                        final int dimX = Array.getLength(o);
+                        if (memberClass.getComponentType().isArray() == false)
+                        {
+                            m.length(dimX);
+                        } else if (dimX > 0)
+                        {
+                            final Object firstElement = Array.get(o, 0);
+                            if (firstElement != null)
+                            {
+                                final int dimY = Array.getLength(firstElement);
+                                m.dimensions(new int[]
+                                    { dimX, dimY });
+                            }
+                        }
+                    } else if (MDAbstractArray.class.isAssignableFrom(memberClass))
+                    {
+                        ReflectionUtils.ensureAccessible(m.fieldOrNull);
+                        final Object o = m.fieldOrNull.get(pojo);
+                        m.dimensions(((MDAbstractArray<?>) o).dimensions());
+                    }
+                }
+            } catch (IllegalAccessException ex)
+            {
+                throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+            }
+        }
+        return result;
+    }
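+
+    // Usage sketch ("template" is an illustrative, fully populated instance): lengths and
+    // dimensions are read from the template's field values, so array-valued fields must be
+    // non-null in the template.
+    //
+    //   final HDF5CompoundMemberMapping[] m =
+    //           HDF5CompoundMemberMapping.inferMapping(template, null, false);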
+
+    /**
+     * This method uses <var>pojo</var> to infer length and dimension information.
+     * 
+     * @param pojo The pojo array to infer member names, length and dimension information from.
+     * @param fieldNameToEnumTypeMapOrNull The map to get member name to enumeration type mapping
+     *            from.
+     * @see #inferMapping(Class, Map) <p>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Object[] pojo,
+            final Map<String, HDF5EnumerationType> fieldNameToEnumTypeMapOrNull)
+    {
+        return inferMapping(pojo, fieldNameToEnumTypeMapOrNull, false);
+    }
+
+    /**
+     * This method uses <var>pojo</var> to infer length and dimension information.
+     * 
+     * @param pojo The pojo array to infer member names, length and dimension information from.
+     * @param fieldNameToEnumTypeMapOrNull The map to get member name to enumeration type mapping
+     *            from.
+     * @param useVariableLengthStringTypes If <code>true</code>, use variable-length string types
+     *            for all strings in the <var>pojo</var> template.
+     * @see #inferMapping(Class, Map) <p>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Object[] pojo,
+            final Map<String, HDF5EnumerationType> fieldNameToEnumTypeMapOrNull,
+            final boolean useVariableLengthStringTypes)
+    {
+        final HDF5CompoundMemberMapping[] result =
+                inferMapping(pojo.getClass().getComponentType(), fieldNameToEnumTypeMapOrNull);
+        for (HDF5CompoundMemberMapping m : result)
+        {
+            try
+            {
+                final Class<?> memberClass = m.tryGetMemberClass();
+                if (m.getMemberTypeDimensions().length == 0)
+                {
+                    if (memberClass == String.class)
+                    {
+                        ReflectionUtils.ensureAccessible(m.fieldOrNull);
+                        if (useVariableLengthStringTypes)
+                        {
+                            m.variableLength(true);
+                        } else
+                        {
+                            int maxLen = 0;
+                            for (int i = 0; i < pojo.length; ++i)
+                            {
+                                maxLen =
+                                        Math.max(maxLen,
+                                                ((String) (m.fieldOrNull.get(pojo[i]))).length());
+                            }
+                            m.length(maxLen);
+                        }
+                    } else if (memberClass.isArray())
+                    {
+                        if (pojo.length > 0)
+                        {
+                            ReflectionUtils.ensureAccessible(m.fieldOrNull);
+                            final Object o = m.fieldOrNull.get(pojo[0]);
+                            final int dimX = Array.getLength(o);
+                            if (memberClass.getComponentType().isArray() == false)
+                            {
+                                m.length(dimX);
+                            } else if (dimX > 0)
+                            {
+                                final Object firstElement = Array.get(o, 0);
+                                if (firstElement != null)
+                                {
+                                    final int dimY = Array.getLength(firstElement);
+                                    m.dimensions(new int[]
+                                        { dimX, dimY });
+                                }
+                            }
+                        }
+                    } else if (MDAbstractArray.class.isAssignableFrom(memberClass)
+                            && pojo.length > 0)
+                    {
+                        ReflectionUtils.ensureAccessible(m.fieldOrNull);
+                        final Object o = m.fieldOrNull.get(pojo[0]);
+                        m.dimensions(((MDAbstractArray<?>) o).dimensions());
+                    }
+                }
+            } catch (IllegalAccessException ex)
+            {
+                throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Adds the given <var>hintsOrNull</var> to all elements of <var>mapping</var>.
+     * 
+     * @return <var>mapping</var>.
+     */
+    public static HDF5CompoundMemberMapping[] addHints(HDF5CompoundMemberMapping[] mapping,
+            HDF5CompoundMappingHints hintsOrNull)
+    {
+        if (hintsOrNull != null)
+        {
+            for (HDF5CompoundMemberMapping m : mapping)
+            {
+                m.hints(hintsOrNull);
+            }
+        }
+        return mapping;
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>compoundMap</var>. All
+     * entries that correspond to members with length or dimension information take this information
+     * from the values supplied.
+     * <p>
+     * <em>Example:</em>
+     * 
+     * <pre>
+     * Map<String, Object> mw = new HashMap<String, Object>();
+     * mw.put("date", new Date());
+     * mw.put("temperatureInDegreeCelsius", 19.5f);
+     * mw.put("voltagesInMilliVolts", new double[][] { 1, 2, 3 }, { 4, 5, 6 } });
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(mw) -> { mapping("date").memberClass(Date.class), 
+     *                       mapping("temperatureInDegreeCelsius").memberClass(float.class), 
+     *                       mapping("voltagesInMilliVolts").memberClass(double[][].class).dimensions(new int[] { 3, 3 } }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Map<String, Object> compoundMap)
+    {
+        return inferMapping(compoundMap, (HDF5CompoundMappingHints) null);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>compoundMap</var>. All
+     * entries that correspond to members with length or dimension information take this information
+     * from the values supplied.
+     * <p>
+     * <em>Example:</em>
+     * 
+     * <pre>
+     * Map<String, Object> mw = new HashMap<String, Object>();
+     * mw.put("date", new Date());
+     * mw.put("temperatureInDegreeCelsius", 19.5f);
+     * mw.put("voltagesInMilliVolts", new double[][] { 1, 2, 3 }, { 4, 5, 6 } });
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(mw) -> { mapping("date").memberClass(Date.class), 
+     *                       mapping("temperatureInDegreeCelsius").memberClass(float.class), 
+     *                       mapping("voltagesInMilliVolts").memberClass(double[][].class).dimensions(new int[] { 3, 3 } }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final Map<String, Object> compoundMap,
+            final HDF5CompoundMappingHints hintsOrNull)
+    {
+        final List<HDF5CompoundMemberMapping> result =
+                inferMapping(compoundMap.size(), compoundMap.entrySet(), hintsOrNull);
+        Collections.sort(result, new Comparator<HDF5CompoundMemberMapping>()
+            {
+                @Override
+                public int compare(HDF5CompoundMemberMapping o1, HDF5CompoundMemberMapping o2)
+                {
+                    return o1.memberName.compareTo(o2.memberName);
+                }
+            });
+        return result.toArray(new HDF5CompoundMemberMapping[result.size()]);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>memberNames</var> and
+     * <var>memberValues</var>. All entries that correspond to members with length or dimension
+     * information take this information from the values supplied.
+     * <p>
+     * <em>Example:</em>
+     * 
+     * <pre>
+     * List<String> n = Arrays.asList("date", "temperatureInDegreeCelsius", "voltagesInMilliVolts");
+     * List<Object> l = Arrays.<Object>asList(new Date(), 19.5f, new double[][] { { 1, 2, 3 }, { 4, 5, 6 } });
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(n, l) -> { mapping("date").memberClass(Date.class), 
+     *                       mapping("temperatureInDegreeCelsius").memberClass(float.class), 
+     *                       mapping("voltagesInMilliVolts").memberClass(double[][].class).dimensions(new int[] { 3, 3 } }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final List<String> memberNames,
+            final List<?> memberValues)
+    {
+        return inferMapping(memberNames, memberValues, (HDF5CompoundMappingHints) null);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>memberNames</var> and
+     * <var>memberValues</var>. All entries that correspond to members with length or dimension
+     * information take this information from the values supplied.
+     * <p>
+     * <em>Example:</em>
+     * 
+     * <pre>
+     * List<String> n = Arrays.asList("date", "temperatureInDegreeCelsius", "voltagesInMilliVolts");
+     * List<Object> l = Arrays.<Object>asList(new Date(), 19.5f, new double[][] { { 1, 2, 3 }, { 4, 5, 6 } });
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(n, l) -> { mapping("date").memberClass(Date.class), 
+     *                       mapping("temperatureInDegreeCelsius").memberClass(float.class), 
+     *                       mapping("voltagesInMilliVolts").memberClass(double[][].class).dimensions(new int[] { 3, 3 } }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final List<String> memberNames,
+            final List<?> memberValues, final HDF5CompoundMappingHints hintsOrNull)
+    {
+        assert memberNames != null;
+        assert memberValues != null;
+        assert memberNames.size() == memberValues.size();
+
+        final List<HDF5CompoundMemberMapping> result =
+                inferMapping(memberNames.size(), createEntryIterable(memberNames, memberValues),
+                        hintsOrNull);
+        return result.toArray(new HDF5CompoundMemberMapping[result.size()]);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>memberNames</var> and
+     * <var>memberValues</var>. All entries that correspond to members with length or dimension
+     * information take this information from the values supplied.
+     * <p>
+     * <em>Example:</em>
+     * 
+     * <pre>
+     * String[] n = new String[] { "date", "temperatureInDegreeCelsius", "voltagesInMilliVolts" };
+     * Object[] l = new Object[] { new Date(), 19.5f, new double[][] { { 1, 2, 3 }, { 4, 5, 6 } } };
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(n, l) -> { mapping("date").memberClass(Date.class), 
+     *                       mapping("temperatureInDegreeCelsius").memberClass(float.class), 
+     *                       mapping("voltagesInMilliVolts").memberClass(double[][].class).dimensions(new int[] { 3, 3 } }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final String[] memberNames,
+            final Object[] memberValues)
+    {
+        return inferMapping(memberNames, memberValues, (HDF5CompoundMappingHints) null);
+    }
+
+    /**
+     * Returns the inferred compound member mapping for the given <var>memberNames</var> and
+     * <var>memberValues</var>. All entries that correspond to members with length or dimension
+     * information take this information from the values supplied.
+     * <p>
+     * <em>Example:</em>
+     * 
+     * <pre>
+     * String[] n = new String[] { "date", "temperatureInDegreeCelsius", "voltagesInMilliVolts" };
+     * Object[] l = new Object[] { new Date(), 19.5f, new double[][] { { 1, 2, 3 }, { 4, 5, 6 } } };
+     * </pre>
+     * 
+     * will lead to:
+     * 
+     * <pre>
+     * inferMapping(n, l) -> { mapping("date").memberClass(Date.class), 
+     *                       mapping("temperatureInDegreeCelsius").memberClass(float.class), 
+     *                       mapping("voltagesInMilliVolts").memberClass(double[][].class).dimensions(new int[] { 3, 3 } }
+     * </pre>
+     */
+    public static HDF5CompoundMemberMapping[] inferMapping(final String[] memberNames,
+            final Object[] memberValues, final HDF5CompoundMappingHints hints)
+    {
+        assert memberNames != null;
+        assert memberValues != null;
+        assert memberNames.length == memberValues.length;
+
+        final List<HDF5CompoundMemberMapping> result =
+                inferMapping(memberNames.length, createEntryIterable(memberNames, memberValues),
+                        hints);
+        return result.toArray(new HDF5CompoundMemberMapping[result.size()]);
+    }
+
+    private static Iterable<Entry<String, Object>> createEntryIterable(
+            final List<String> memberNames, final List<?> memberValues)
+    {
+        return new Iterable<Map.Entry<String, Object>>()
+            {
+                @Override
+                public Iterator<Entry<String, Object>> iterator()
+                {
+                    return new Iterator<Map.Entry<String, Object>>()
+                        {
+                            int idx = -1;
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return idx < memberNames.size() - 1;
+                            }
+
+                            @Override
+                            public Entry<String, Object> next()
+                            {
+                                ++idx;
+                                return new Entry<String, Object>()
+                                    {
+                                        @Override
+                                        public String getKey()
+                                        {
+                                            return memberNames.get(idx);
+                                        }
+
+                                        @Override
+                                        public Object getValue()
+                                        {
+                                            return memberValues.get(idx);
+                                        }
+
+                                        @Override
+                                        public Object setValue(Object value)
+                                        {
+                                            throw new UnsupportedOperationException();
+                                        }
+                                    };
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    private static Iterable<Entry<String, Object>> createEntryIterable(final String[] memberNames,
+            final Object[] memberValues)
+    {
+        return new Iterable<Map.Entry<String, Object>>()
+            {
+                @Override
+                public Iterator<Entry<String, Object>> iterator()
+                {
+                    return new Iterator<Map.Entry<String, Object>>()
+                        {
+                            int idx = -1;
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return idx < memberNames.length - 1;
+                            }
+
+                            @Override
+                            public Entry<String, Object> next()
+                            {
+                                ++idx;
+                                return new Entry<String, Object>()
+                                    {
+                                        @Override
+                                        public String getKey()
+                                        {
+                                            return memberNames[idx];
+                                        }
+
+                                        @Override
+                                        public Object getValue()
+                                        {
+                                            return memberValues[idx];
+                                        }
+
+                                        @Override
+                                        public Object setValue(Object value)
+                                        {
+                                            throw new UnsupportedOperationException();
+                                        }
+                                    };
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    private static List<HDF5CompoundMemberMapping> inferMapping(final int size,
+            final Iterable<Map.Entry<String, Object>> entries,
+            final HDF5CompoundMappingHints hintsOrNull)
+    {
+        final List<HDF5CompoundMemberMapping> result =
+                new ArrayList<HDF5CompoundMemberMapping>(size);
+        for (Map.Entry<String, Object> entry : entries)
+        {
+            final String memberName = entry.getKey();
+            final Object memberValue = entry.getValue();
+            final Class<?> memberClass = HDF5Utils.unwrapClass(memberValue.getClass());
+            HDF5DataTypeVariant variantOrNull;
+            if (memberClass == HDF5TimeDuration.class)
+            {
+                variantOrNull = ((HDF5TimeDuration) memberValue).getUnit().getTypeVariant();
+            } else
+            {
+                variantOrNull = null;
+            }
+            if (memberClass.isArray())
+            {
+                final int lenx = Array.getLength(memberValue);
+                if (lenx > 0 && Array.get(memberValue, 0).getClass().isArray())
+                {
+                    final int leny = Array.getLength(Array.get(memberValue, 0));
+                    result.add(new HDF5CompoundMemberMapping(memberName, memberClass, memberName,
+                            null, null, new int[]
+                                { lenx, leny }, variantOrNull));
+                } else
+                {
+                    result.add(new HDF5CompoundMemberMapping(memberName, memberClass, memberName,
+                            null, null, new int[]
+                                { lenx }, variantOrNull));
+                }
+            } else if (MDAbstractArray.class.isInstance(memberValue))
+            {
+                result.add(new HDF5CompoundMemberMapping(memberName, memberClass, memberName, null,
+                        null, ((MDAbstractArray<?>) memberValue).dimensions(), variantOrNull));
+            } else
+            {
+                HDF5EnumerationType enumTypeOrNull = null;
+                final boolean variableLength =
+                        (hintsOrNull == null) ? false : hintsOrNull.isUseVariableLengthStrings();
+                final int[] dimensions;
+                if (memberClass == HDF5EnumerationValue.class)
+                {
+                    enumTypeOrNull = ((HDF5EnumerationValue) memberValue).getType();
+                    dimensions = new int[0];
+                } else if (memberClass == HDF5EnumerationValueArray.class)
+                {
+                    enumTypeOrNull = ((HDF5EnumerationValueArray) memberValue).getType();
+                    dimensions = new int[]
+                        { ((HDF5EnumerationValueArray) memberValue).getLength() };
+                } else if (memberClass == String.class)
+                {
+                    dimensions = (variableLength) ? new int[0] : new int[]
+                        { ((String) memberValue).length() };
+                } else if (memberClass == BitSet.class)
+                {
+                    final int len = ((BitSet) memberValue).length();
+                    dimensions = new int[]
+                        { len > 0 ? len : 1 };
+                } else
+                {
+                    dimensions = new int[0];
+                }
+                result.add(new HDF5CompoundMemberMapping(memberName, memberClass, memberName,
+                        enumTypeOrNull, null, dimensions, false, variableLength, variantOrNull)
+                        .hints(hintsOrNull));
+            }
+        }
+        return result;
+    }
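+
+    // Summary of the dimension rules applied above (informal): Java arrays map to { lenX } or
+    // { lenX, lenY }; MDAbstractArray values contribute their own dimensions(); Strings map to
+    // their length (or to no dimensions when variable-length strings are requested); BitSets
+    // map to their bit length (at least 1); everything else is scalar.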
+
+    /**
+     * Infers a name for a compound type from the given <var>memberNames</var> by concatenating
+     * them.
+     * 
+     * @param memberNames The names of the members to use to build the compound type name from.
+     * @param sort If <code>true</code>, the names will be sorted before they are concatenated.
+     */
+    public static String constructCompoundTypeName(final Collection<String> memberNames,
+            boolean sort)
+    {
+        final Collection<String> names = sort ? sort(memberNames) : memberNames;
+        final StringBuilder b = new StringBuilder();
+        for (String name : names)
+        {
+            b.append(name);
+            b.append(':');
+        }
+        b.setLength(b.length() - 1);
+        return b.toString();
+    }
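+
+    // Example: constructCompoundTypeName(Arrays.asList("s", "i"), true) sorts the names and
+    // joins them with ':', yielding "i:s".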
+
+    private static List<String> sort(Collection<String> memberNames)
+    {
+        final List<String> names = new ArrayList<String>(memberNames);
+        Collections.sort(names);
+        return names;
+    }
+
+    /**
+     * Infers the map from field names to {@link HDF5EnumerationType}s for the given <var>pojo</var>
+     * object.
+     */
+    public static <T> Map<String, HDF5EnumerationType> inferEnumerationTypeMap(T pojo,
+            IHDF5EnumTypeRetriever enumTypeRetriever)
+    {
+        Map<String, HDF5EnumerationType> resultOrNull = null;
+        for (Class<?> c = pojo.getClass(); c != null; c = c.getSuperclass())
+        {
+            for (Field f : c.getDeclaredFields())
+            {
+                if (f.getType() == HDF5EnumerationValue.class)
+                {
+                    ReflectionUtils.ensureAccessible(f);
+                    try
+                    {
+                        if (resultOrNull == null)
+                        {
+                            resultOrNull = new HashMap<String, HDF5EnumerationType>();
+                        }
+                        resultOrNull.put(f.getName(),
+                                ((HDF5EnumerationValue) f.get(pojo)).getType());
+                    } catch (IllegalArgumentException ex)
+                    {
+                        throw new Error(ex);
+                    } catch (IllegalAccessException ex)
+                    {
+                        throw new Error(ex);
+                    }
+                }
+                if (f.getType().isEnum())
+                {
+                    ReflectionUtils.ensureAccessible(f);
+                    try
+                    {
+                        if (resultOrNull == null)
+                        {
+                            resultOrNull = new HashMap<String, HDF5EnumerationType>();
+                        }
+                        resultOrNull.put(f.getName(), enumTypeRetriever.getType(f.getType()
+                                .getSimpleName(), ReflectionUtils.getEnumOptions(asEnumClass(f))));
+                    } catch (IllegalArgumentException ex)
+                    {
+                        throw new Error(ex);
+                    }
+                }
+                if (f.getType() == HDF5EnumerationValueArray.class)
+                {
+                    ReflectionUtils.ensureAccessible(f);
+                    try
+                    {
+                        if (resultOrNull == null)
+                        {
+                            resultOrNull = new HashMap<String, HDF5EnumerationType>();
+                        }
+                        resultOrNull.put(f.getName(),
+                                ((HDF5EnumerationValueArray) f.get(pojo)).getType());
+                    } catch (IllegalArgumentException ex)
+                    {
+                        throw new Error(ex);
+                    } catch (IllegalAccessException ex)
+                    {
+                        throw new Error(ex);
+                    }
+                }
+            }
+        }
+        return resultOrNull;
+    }
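+
+    // Usage sketch ("record" and "enumTypeRetriever" are illustrative): the inferred map can be
+    // fed back into inferMapping(Class, Map).
+    //
+    //   final Map<String, HDF5EnumerationType> enums =
+    //           inferEnumerationTypeMap(record, enumTypeRetriever);
+    //   final HDF5CompoundMemberMapping[] m = inferMapping(record.getClass(), enums);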
+
+    @SuppressWarnings("unchecked")
+    private static Class<? extends Enum<?>> asEnumClass(Field f)
+    {
+        return (Class<? extends Enum<?>>) f.getType();
+    }
+
+    @SuppressWarnings("rawtypes")
+    private final static IdentityHashMap<Class, HDF5DataTypeVariant> typeVariantMap =
+            new IdentityHashMap<Class, HDF5DataTypeVariant>();
+
+    static
+    {
+        typeVariantMap.put(java.util.Date.class,
+                HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+        typeVariantMap.put(HDF5TimeDuration.class, HDF5DataTypeVariant.TIME_DURATION_MICROSECONDS);
+    }
+
+    /**
+     * A {@link HDF5CompoundMemberMapping} that uses the <var>fieldName</var> also as the member
+     * name in the HDF5 compound data type.
+     * 
+     * @param fieldName The name of the field in the <var>clazz</var>; also used as the member
+     *            name.
+     */
+    private HDF5CompoundMemberMapping(String fieldName)
+    {
+        this(fieldName, null, null, fieldName, null, null, new int[0], -1, false, false, false,
+                null);
+    }
+
+    /**
+     * A {@link HDF5CompoundMemberMapping} that allows providing an explicit <var>memberName</var>
+     * that differs from the <var>fieldName</var> and the maximal length in case of a String member.
+     * 
+     * @param fieldName The name of the field in the <var>clazz</var>
+     * @param memberClassOrNull The class of the member, if a map is used as the compound pojo.
+     * @param memberName The name of the member in the HDF5 compound data type.
+     * @param enumTypeOrNull The HDF5 enumeration type of this member.
+     * @param enumTypeNameOrNull The name to be used for the HDF5 enumeration type.
+     * @param memberTypeDimensions The dimensions of the member type, or 0 for a scalar value.
+     * @param typeVariantOrNull The data type variant of this mapping, or <code>null</code> if this
+     *            mapping has no type variant.
+     */
+    private HDF5CompoundMemberMapping(String fieldName, Class<?> memberClassOrNull,
+            String memberName, HDF5EnumerationType enumTypeOrNull, String enumTypeNameOrNull,
+            int[] memberTypeDimensions, HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this(fieldName, null, memberClassOrNull, memberName, enumTypeOrNull, enumTypeNameOrNull,
+                memberTypeDimensions, -1, false, false, false, typeVariantOrNull);
+    }
+
+    /**
+     * A {@link HDF5CompoundMemberMapping} that allows one to provide an explicit <var>memberName</var>
+     * that differs from the <var>fieldName</var> and the maximal length in case of a String member.
+     * 
+     * @param fieldName The name of the field in the <var>clazz</var>
+     * @param memberClassOrNull The class of the member, if a map is used as the compound pojo.
+     * @param memberName The name of the member in the HDF5 compound data type.
+     * @param enumTypeOrNull The HDF5 enumeration type of this member.
+     * @param enumTypeNameOrNull The name to be used for the HDF5 enumeration type.
+     * @param memberTypeDimensions The dimensions of the member type, or 0 for a scalar value.
+     * @param unsigned If <code>true</code>, the type will be mapped to an unsigned integer type.
+     * @param variableLength If <code>true</code>, the type will be mapped to a variable-length
+     *            type.
+     * @param typeVariantOrNull The data type variant of this mapping, or <code>null</code> if this
+     *            mapping has no type variant.
+     */
+    private HDF5CompoundMemberMapping(String fieldName, Class<?> memberClassOrNull,
+            String memberName, HDF5EnumerationType enumTypeOrNull, String enumTypeNameOrNull,
+            int[] memberTypeDimensions, boolean unsigned, boolean variableLength,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this(fieldName, null, memberClassOrNull, memberName, enumTypeOrNull, enumTypeNameOrNull,
+                memberTypeDimensions, -1, unsigned, variableLength, false, typeVariantOrNull);
+    }
+
+    /**
+     * A {@link HDF5CompoundMemberMapping} that allows one to provide an explicit <var>memberName</var>
+     * that differs from the <var>fieldName</var> and the maximal length in case of a String member.
+     * 
+     * @param fieldName The name of the field in the <var>clazz</var>
+     * @param fieldOrNull The {@link Field} in the compound class (may be <code>null</code>)
+     * @param memberClassOrNull The class of the member, if a map is used as the compound pojo.
+     * @param memberName The name of the member in the HDF5 compound data type.
+     * @param enumTypeOrNull The HDF5 enumeration type of this member.
+     * @param enumTypeNameOrNull The name to be used for the HDF5 enumeration type.
+     * @param memberTypeDimensions The dimensions of the member type, or 0 for a scalar value.
+     * @param unsigned If <code>true</code>, the type will be mapped to an unsigned integer type.
+     * @param variableLength If <code>true</code>, the type will be mapped to a variable-length
+     *            type.
+     * @param reference If <code>true</code>, the type will be mapped to a reference type.
+     * @param typeVariantOrNull The data type variant of this mapping, or <code>null</code> if this
+     *            mapping has no type variant.
+     */
+    private HDF5CompoundMemberMapping(String fieldName, Field fieldOrNull,
+            Class<?> memberClassOrNull, String memberName, HDF5EnumerationType enumTypeOrNull,
+            String enumTypeNameOrNull, int[] memberTypeDimensions, boolean unsigned,
+            boolean variableLength, boolean reference, HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this(fieldName, fieldOrNull, memberClassOrNull, memberName, enumTypeOrNull,
+                enumTypeNameOrNull, memberTypeDimensions, -1, unsigned, variableLength, reference,
+                typeVariantOrNull);
+    }
+
+    /**
+     * A {@link HDF5CompoundMemberMapping} that allows one to provide an explicit <var>memberName</var>
+     * that differs from the <var>fieldName</var> and the maximal length in case of a String member.
+     * 
+     * @param fieldName The name of the field in the <var>clazz</var>
+     * @param fieldOrNull The {@link Field} in the compound class (may be <code>null</code>)
+     * @param memberClassOrNull The class of the member, if a map is used as the compound pojo.
+     * @param memberName The name of the member in the HDF5 compound data type.
+     * @param enumTypeOrNull The enumeration type (only used for enumeration members).
+     * @param enumTypeNameOrNull The name of the committed HDF5 enum type.
+     * @param memberTypeDimensions The dimensions of the member type, or 0 for a scalar value.
+     * @param unsigned If <code>true</code>, the type will be mapped to an unsigned integer type.
+     * @param variableLength If <code>true</code>, the type will be mapped to a variable-length
+     *            string type.
+     * @param reference If <code>true</code>, the type will be mapped to a reference type.
+     * @param storageMemberTypeId The storage data type id of the member, or -1 if not available
+     * @param typeVariantOrNull The data type variant of this mapping, or <code>null</code> if this
+     *            mapping has no type variant.
+     */
+    private HDF5CompoundMemberMapping(String fieldName, Field fieldOrNull,
+            Class<?> memberClassOrNull, String memberName, HDF5EnumerationType enumTypeOrNull,
+            String enumTypeNameOrNull, int[] memberTypeDimensions, int storageMemberTypeId,
+            boolean unsigned, boolean variableLength, boolean reference,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this.fieldOrNull = fieldOrNull;
+        this.fieldName = fieldName;
+        this.memberClassOrNull = memberClassOrNull;
+        this.memberName = memberName;
+        this.enumTypeOrNull = enumTypeOrNull;
+        this.enumTypeNameOrNull = enumTypeNameOrNull;
+        this.memberTypeDimensions = memberTypeDimensions;
+        this.memberTypeLength = MDAbstractArray.getLength(memberTypeDimensions);
+        this.storageDataTypeId = storageMemberTypeId;
+        this.unsigned = unsigned;
+        this.variableLength = variableLength;
+        this.reference = reference;
+        // Fall back to the default type variant for well-known member classes (see
+        // typeVariantMap) when no explicit type variant is given.
+        final Class<?> memberTypeKeyOrNull =
+                (fieldOrNull != null) ? fieldOrNull.getType() : memberClassOrNull;
+        if (typeVariantOrNull == null && typeVariantMap.containsKey(memberTypeKeyOrNull))
+        {
+            this.typeVariantOrNull = typeVariantMap.get(memberTypeKeyOrNull);
+        } else
+        {
+            this.typeVariantOrNull = HDF5DataTypeVariant.maskNull(typeVariantOrNull);
+        }
+    }
+
+    /**
+     * Sets the field name in the Java class to use for the mapping, overriding the member name
+     * which is used by default to find the field.
+     */
+    @SuppressWarnings("hiding")
+    public HDF5CompoundMemberMapping fieldName(String fieldName)
+    {
+        this.fieldName = fieldName;
+        return this;
+    }
+
+    /**
+     * Sets the name to be used for the committed HDF5 data type (for Java enum types only),
+     * overriding the simple class name which is used by default as the type name.
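+     * <p>
+     * A minimal sketch (assuming the static <code>mapping(String)</code> factory defined earlier
+     * in this class and a hypothetical Java enum field <code>state</code>):
+     * 
+     * <pre>
+     * HDF5CompoundMemberMapping m = mapping("state").enumTypeName("SensorState");
+     * </pre>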
+     */
+    public HDF5CompoundMemberMapping enumTypeName(String enumTypeName)
+    {
+        this.enumTypeNameOrNull =
+                (enumTypeName != null && enumTypeName.length() == 0) ? null : enumTypeName;
+        return this;
+    }
+
+    String tryGetEnumTypeName()
+    {
+        return enumTypeNameOrNull;
+    }
+
+    Field tryGetField(Class<?> clazz, boolean skipChecks) throws HDF5JavaException
+    {
+        return tryGetField(clazz, clazz, skipChecks);
+    }
+
+    Field tryGetField(Class<?> clazz) throws HDF5JavaException
+    {
+        return tryGetField(clazz, clazz, false);
+    }
+
+    private Field tryGetField(Class<?> clazz, Class<?> searchClass, boolean skipChecks)
+            throws HDF5JavaException
+    {
+        try
+        {
+            final Field field = clazz.getDeclaredField(fieldName);
+            final boolean isArray = isArray(field);
+            if (skipChecks == false)
+            {
+                if (memberTypeLength > 1)
+                {
+                    if (field.getType() != String.class && false == isArray)
+                    {
+                        throw new HDF5JavaException("Field '" + fieldName + "' of class '"
+                                + clazz.getCanonicalName()
+                                + "' is neither a String nor an array, but a length > 1 is given.");
+                    }
+                } else if (memberTypeLength == 0 && (isFixedLengthString(field) || isArray))
+                {
+                    throw new HDF5JavaException("Field '" + fieldName + "' of class '"
+                            + clazz.getCanonicalName()
+                            + "' is a String or array, but a length == 0 is given.");
+                }
+            }
+            return field;
+        } catch (NoSuchFieldException ex)
+        {
+            final Class<?> superClassOrNull = clazz.getSuperclass();
+            if (superClassOrNull == null || superClassOrNull == Object.class)
+            {
+                return null;
+            } else
+            {
+                return tryGetField(superClassOrNull, searchClass, skipChecks);
+            }
+        }
+    }
+
+    private boolean isArray(final Field field)
+    {
+        final Class<?> fieldType = field.getType();
+        return fieldType.isArray() || MDAbstractArray.class.isAssignableFrom(fieldType)
+                || field.getType() == java.util.BitSet.class
+                || field.getType() == HDF5EnumerationValueArray.class;
+    }
+
+    private boolean isFixedLengthString(final Field field)
+    {
+        return (field.getType() == String.class && false == variableLength && false == reference)
+                && (hintsOrNull == null || false == hintsOrNull.isUseVariableLengthStrings());
+    }
+
+    String getMemberName()
+    {
+        return memberName;
+    }
+
+    /**
+     * Sets the member class to use for the mapping.
+     */
+    public HDF5CompoundMemberMapping memberClass(Class<?> memberClass)
+    {
+        this.memberClassOrNull = memberClass;
+        return this;
+    }
+
+    public Class<?> tryGetMemberClass()
+    {
+        return memberClassOrNull;
+    }
+
+    /**
+     * Sets the length of the member type to use for the mapping. Must be set for String and
+     * BitSet members. Can be used as a convenience method replacing {@link #dimensions(int[])}
+     * for array members of rank 1.
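+     * <p>
+     * For example (hypothetical field name), <code>mapping("comment").length(20)</code> maps a
+     * String field to a fixed-length string member of 20 characters.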
+     */
+    @SuppressWarnings("hiding")
+    public HDF5CompoundMemberMapping length(int memberTypeLength)
+    {
+        return dimensions(new int[]
+            { memberTypeLength });
+    }
+
+    int getMemberTypeLength()
+    {
+        return memberTypeLength;
+    }
+
+    /**
+     * Sets the dimensions of the member type to use for the mapping. Convenience method replacing
+     * {@link #dimensions(int[])} for array members of rank 2.
+     */
+    public HDF5CompoundMemberMapping dimensions(int memberTypeDimensionX, int memberTypeDimensionY)
+    {
+        this.memberTypeDimensions = new int[]
+            { memberTypeDimensionX, memberTypeDimensionY };
+        this.memberTypeLength = MDAbstractArray.getLength(memberTypeDimensions);
+        return this;
+    }
+
+    /**
+     * Sets the dimensions of the member type to use for the mapping. Must be set for array members
+     * of rank N.
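+     * <p>
+     * For example (hypothetical field name),
+     * <code>mapping("matrix").dimensions(new int[] { 3, 4 })</code> maps an array field to a
+     * member with dimensions 3x4.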
+     */
+    @SuppressWarnings("hiding")
+    public HDF5CompoundMemberMapping dimensions(int[] memberTypeDimensions)
+    {
+        this.memberTypeDimensions = memberTypeDimensions;
+        this.memberTypeLength = MDAbstractArray.getLength(memberTypeDimensions);
+        if (enumTypeOrNull != null)
+        {
+            checkEnumArrayRank();
+            this.memberClassOrNull = HDF5EnumerationValueArray.class;
+        }
+        return this;
+    }
+
+    /**
+     * Sets this field to an unsigned type. Must be an integer.
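+     * <p>
+     * For example (hypothetical field name), <code>mapping("count").unsigned()</code> maps an
+     * <code>int</code> field to an unsigned 32-bit integer member.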
+     */
+    public HDF5CompoundMemberMapping unsigned()
+    {
+        this.unsigned = true;
+        return this;
+    }
+
+    /**
+     * Sets this field to an unsigned type, if <var>unsigned</var> is <code>true</code>. Must be an
+     * integer.
+     */
+    public HDF5CompoundMemberMapping unsigned(@SuppressWarnings("hiding")
+    boolean unsigned)
+    {
+        this.unsigned = unsigned;
+        return this;
+    }
+
+    /**
+     * Returns <code>true</code> if this field should be mapped to an unsigned integer.
+     */
+    boolean isUnsigned()
+    {
+        return this.unsigned;
+    }
+
+    /**
+     * Sets this field to a variable-length type. Must be a string.
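+     * <p>
+     * For example (hypothetical field name), <code>mapping("description").variableLength()</code>
+     * maps a String field to a variable-length string member.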
+     */
+    public HDF5CompoundMemberMapping variableLength()
+    {
+        this.variableLength = true;
+        return this;
+    }
+
+    /**
+     * Sets this field to a variable-length type, if <var>variableLength</var> is <code>true</code>.
+     * Must be a string.
+     */
+    public HDF5CompoundMemberMapping variableLength(@SuppressWarnings("hiding")
+    boolean variableLength)
+    {
+        this.variableLength = variableLength;
+        return this;
+    }
+
+    /**
+     * Returns <code>true</code> if this field should be mapped to a variable-length string.
+     */
+    public boolean isVariableLength()
+    {
+        return this.variableLength;
+    }
+
+    /**
+     * Sets this field to a reference type. Must be a string.
+     */
+    public HDF5CompoundMemberMapping reference()
+    {
+        this.reference = true;
+        return this;
+    }
+
+    /**
+     * Sets this field to a reference type, if <var>reference</var> is <code>true</code>. Must be a
+     * string.
+     */
+    public HDF5CompoundMemberMapping reference(@SuppressWarnings("hiding")
+    boolean reference)
+    {
+        this.reference = reference;
+        return this;
+    }
+
+    /**
+     * Returns <code>true</code> if this field should be mapped to a reference type.
+     */
+    public boolean isReference()
+    {
+        return this.reference;
+    }
+
+    /**
+     * Sets mapping hints for this mapping.
+     */
+    public HDF5CompoundMemberMapping hints(HDF5CompoundMappingHints hints)
+    {
+        this.hintsOrNull = hints;
+        return this;
+    }
+
+    private void checkEnumArrayRank()
+    {
+        if (memberTypeDimensions != null && memberTypeDimensions.length > 1)
+        {
+            throw new HDF5JavaException("Enumeration arrays only supported with rank 1 [rank="
+                    + memberTypeDimensions.length + "]");
+        }
+    }
+
+    int[] getMemberTypeDimensions()
+    {
+        return memberTypeDimensions;
+    }
+
+    int getStorageDataTypeId()
+    {
+        return storageDataTypeId;
+    }
+
+    /**
+     * Sets the enumeration type to use for the mapping. Must be set for enumeration members.
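+     * <p>
+     * A minimal sketch (assuming <var>stateType</var> is an {@link HDF5EnumerationType} obtained
+     * from the reader or writer, and the static <code>mapping(String)</code> factory defined
+     * earlier in this class):
+     * 
+     * <pre>
+     * HDF5CompoundMemberMapping m = mapping("state").enumType(stateType);
+     * </pre>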
+     */
+    public HDF5CompoundMemberMapping enumType(HDF5EnumerationType enumType)
+    {
+        this.enumTypeOrNull = enumType;
+        checkEnumArrayRank();
+        this.memberClassOrNull =
+                (memberTypeLength == 0) ? HDF5EnumerationValue.class
+                        : HDF5EnumerationValueArray.class;
+        return this;
+    }
+
+    HDF5EnumerationType tryGetEnumerationType()
+    {
+        return enumTypeOrNull;
+    }
+
+    void setEnumerationType(HDF5EnumerationType enumType)
+    {
+        this.enumTypeOrNull = enumType;
+    }
+
+    HDF5CompoundMappingHints tryGetHints()
+    {
+        return hintsOrNull;
+    }
+
+    /**
+     * Sets the data type variant to use for the mapping.
+     */
+    public HDF5CompoundMemberMapping typeVariant(HDF5DataTypeVariant typeVariant)
+    {
+        this.typeVariantOrNull = typeVariant;
+        return this;
+    }
+
+    HDF5DataTypeVariant tryGetTypeVariant()
+    {
+        return typeVariantOrNull;
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundReader.java
new file mode 100644
index 0000000..b341294
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundReader.java
@@ -0,0 +1,682 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_COMPOUND;
+
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5CompoundReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundReader extends HDF5CompoundInformationRetriever implements IHDF5CompoundReader
+{
+
+    HDF5CompoundReader(HDF5BaseReader baseReader, IHDF5EnumReader enumReader)
+    {
+        super(baseReader, enumReader);
+    }
+
+    @Override
+    public <T> T getAttr(final String objectPath, final String attributeName,
+            final HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return primGetCompoundAttribute(objectPath, attributeName, type, null);
+    }
+
+    @Override
+    public <T> T getAttr(final String objectPath, final String attributeName,
+            final Class<T> pojoClass) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5CompoundType<T> attributeCompoundType =
+                getAttributeType(objectPath, attributeName, pojoClass);
+        attributeCompoundType.checkMappingComplete();
+        return primGetCompoundAttribute(objectPath, attributeName, attributeCompoundType, null);
+    }
+
+    @Override
+    public <T> T[] getArrayAttr(String objectPath, String attributeName, HDF5CompoundType<T> type)
+            throws HDF5JavaException
+    {
+        return primGetCompoundArrayAttribute(objectPath, attributeName, type, null);
+    }
+
+    @Override
+    public <T> T[] getArrayAttr(String objectPath, String attributeName, Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5CompoundType<T> attributeCompoundType =
+                getAttributeType(objectPath, attributeName, pojoClass);
+        attributeCompoundType.checkMappingComplete();
+        return primGetCompoundArrayAttribute(objectPath, attributeName, attributeCompoundType, null);
+    }
+
+    @Override
+    public <T> MDArray<T> getMDArrayAttr(String objectPath, String attributeName,
+            HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return primGetCompoundMDArrayAttribute(objectPath, attributeName, type, null);
+    }
+
+    @Override
+    public <T> MDArray<T> getMDArrayAttr(String objectPath, String attributeName, Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5CompoundType<T> attributeCompoundType =
+                getAttributeType(objectPath, attributeName, pojoClass);
+        attributeCompoundType.checkMappingComplete();
+        return primGetCompoundMDArrayAttribute(objectPath, attributeName, attributeCompoundType,
+                null);
+    }
+
+    private <T> T primGetCompoundAttribute(final String objectPath, final String attributeName,
+            final HDF5CompoundType<T> type, final IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        final ICallableWithCleanUp<T> readRunnable = new ICallableWithCleanUp<T>()
+            {
+                @Override
+                public T call(final ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final int storageDataTypeId =
+                            baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+                    checkCompoundType(storageDataTypeId, objectPath, type);
+                    final int nativeDataTypeId = type.getNativeTypeId();
+                    final byte[] byteArr =
+                            baseReader.h5.readAttributeAsByteArray(attributeId, nativeDataTypeId,
+                                    type.getObjectByteifyer().getRecordSizeInMemory());
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArr);
+                    }
+                    final T scalar = type.getObjectByteifyer().arrayifyScalar(storageDataTypeId, byteArr,
+                            type.getCompoundType());
+                    baseReader.h5.reclaimCompoundVL(type, byteArr);
+                    return scalar;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    private <T> T[] primGetCompoundArrayAttribute(final String objectPath,
+            final String attributeName, final HDF5CompoundType<T> type,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        final ICallableWithCleanUp<T[]> readRunnable = new ICallableWithCleanUp<T[]>()
+            {
+                @Override
+                public T[] call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(dataSetId, attributeName, registry);
+                    final int storageDataTypeId =
+                            baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataType(storageDataTypeId, registry);
+                    final int len;
+                    final int compoundTypeId;
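+                    // A compound array attribute is stored either as a scalar of an H5T_ARRAY
+                    // type with a compound base type, or as a compound type with an array data
+                    // space; determine the element count and compound type id for both layouts.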
+                    if (baseReader.h5.getClassType(storageDataTypeId) == H5T_ARRAY)
+                    {
+                        final int[] arrayDimensions =
+                                baseReader.h5.getArrayDimensions(storageDataTypeId);
+                        len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+                        compoundTypeId = baseReader.h5.getBaseDataType(storageDataTypeId, registry);
+                        if (baseReader.h5.getClassType(compoundTypeId) != H5T_COMPOUND)
+                        {
+                            throw new HDF5JavaException("Attribute '" + attributeName
+                                    + "' of object '" + objectPath
+                                    + "' is not of type compound array.");
+                        }
+                    } else
+                    {
+                        if (baseReader.h5.getClassType(storageDataTypeId) != H5T_COMPOUND)
+                        {
+                            throw new HDF5JavaException("Attribute '" + attributeName
+                                    + "' of object '" + objectPath
+                                    + "' is not of type compound array.");
+                        }
+                        compoundTypeId = storageDataTypeId;
+                        final long[] arrayDimensions =
+                                baseReader.h5.getDataDimensionsForAttribute(attributeId, registry);
+                        len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+                    }
+                    checkCompoundType(compoundTypeId, objectPath, type);
+                    final byte[] byteArr =
+                            baseReader.h5.readAttributeAsByteArray(attributeId, nativeDataTypeId,
+                                    len * type.getRecordSizeInMemory());
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArr);
+                    }
+                    final T[] array = type.getObjectByteifyer().arrayify(storageDataTypeId, byteArr,
+                            type.getCompoundType());
+                    baseReader.h5.reclaimCompoundVL(type, byteArr);
+                    return array;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    private <T> MDArray<T> primGetCompoundMDArrayAttribute(final String objectPath,
+            final String attributeName, final HDF5CompoundType<T> type,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        final ICallableWithCleanUp<MDArray<T>> readRunnable =
+                new ICallableWithCleanUp<MDArray<T>>()
+                    {
+                        @Override
+                        public MDArray<T> call(final ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(dataSetId, attributeName, registry);
+                            final int storageDataTypeId =
+                                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+                            final int nativeDataTypeId =
+                                    baseReader.h5.getNativeDataType(storageDataTypeId, registry);
+                            final int len;
+                            final int[] arrayDimensions;
+                            final int compoundTypeId;
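+                            // As in the 1D case, the attribute is stored either as a scalar of
+                            // an H5T_ARRAY type with a compound base type, or as a compound type
+                            // with an array data space; handle both layouts.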
+                            if (baseReader.h5.getClassType(storageDataTypeId) == H5T_ARRAY)
+                            {
+                                arrayDimensions =
+                                        baseReader.h5.getArrayDimensions(storageDataTypeId);
+                                len = MDAbstractArray.getLength(arrayDimensions);
+                                compoundTypeId =
+                                        baseReader.h5.getBaseDataType(storageDataTypeId, registry);
+                                if (baseReader.h5.getClassType(compoundTypeId) != H5T_COMPOUND)
+                                {
+                                    throw new HDF5JavaException("Attribute '" + attributeName
+                                            + "' of object '" + objectPath
+                                            + "' is not of type compound array.");
+                                }
+                            } else
+                            {
+                                if (baseReader.h5.getClassType(storageDataTypeId) != H5T_COMPOUND)
+                                {
+                                    throw new HDF5JavaException("Attribute '" + attributeName
+                                            + "' of object '" + objectPath
+                                            + "' is not of type compound array.");
+                                }
+                                compoundTypeId = storageDataTypeId;
+                                arrayDimensions =
+                                        MDAbstractArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                                attributeId, registry));
+                                len = MDAbstractArray.getLength(arrayDimensions);
+                            }
+                            checkCompoundType(compoundTypeId, objectPath, type);
+                            final byte[] byteArr =
+                                    baseReader.h5.readAttributeAsByteArray(attributeId,
+                                            nativeDataTypeId, len * type.getRecordSizeInMemory());
+                            if (inspectorOrNull != null)
+                            {
+                                inspectorOrNull.inspect(byteArr);
+                            }
+                            final MDArray<T> array = new MDArray<T>(type.getObjectByteifyer().arrayify(
+                                    storageDataTypeId, byteArr, type.getCompoundType()),
+                                    arrayDimensions);
+                            baseReader.h5.reclaimCompoundVL(type, byteArr);
+                            return array;
+                        }
+                    };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public <T> T read(final String objectPath, final HDF5CompoundType<T> type)
+            throws HDF5JavaException
+    {
+        return read(objectPath, type, null);
+    }
+
+    @Override
+    public <T> T read(final String objectPath, final Class<T> pojoClass) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5CompoundType<T> dataSetCompoundType = getDataSetType(objectPath, pojoClass);
+        dataSetCompoundType.checkMappingComplete();
+        return read(objectPath, dataSetCompoundType, null);
+    }
+
+    @Override
+    public <T> T read(final String objectPath, final HDF5CompoundType<T> type,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        return primReadCompound(objectPath, -1, -1, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> T[] readArray(final String objectPath, final HDF5CompoundType<T> type)
+            throws HDF5JavaException
+    {
+        return readArray(objectPath, type, null);
+    }
+
+    @Override
+    public <T> T[] readArray(final String objectPath, final HDF5CompoundType<T> type,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        return primReadCompoundArray(objectPath, -1, -1, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> T[] readArray(final String objectPath, final Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5CompoundType<T> dataSetCompoundType = getDataSetType(objectPath, pojoClass);
+        dataSetCompoundType.checkMappingComplete();
+        return readArray(objectPath, dataSetCompoundType, null);
+    }
+
+    @Override
+    public <T> T[] readArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final int blockSize, final long blockNumber) throws HDF5JavaException
+    {
+        return readArrayBlock(objectPath, type, blockSize, blockNumber, null);
+    }
+
+    @Override
+    public <T> T[] readArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final int blockSize, final long blockNumber, final IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        return primReadCompoundArray(objectPath, blockSize, blockSize * blockNumber, type,
+                inspectorOrNull);
+    }
+
+    @Override
+    public <T> T[] readArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final int blockSize, final long offset)
+            throws HDF5JavaException
+    {
+        return readArrayBlockWithOffset(objectPath, type, blockSize, offset, null);
+    }
+
+    @Override
+    public <T> T[] readArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final int blockSize, final long offset,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        return primReadCompoundArray(objectPath, blockSize, offset, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> Iterable<HDF5DataBlock<T[]>> getArrayBlocks(final String objectPath,
+            final HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return getArrayBlocks(objectPath, type, null);
+    }
+
+    @Override
+    public <T> Iterable<HDF5DataBlock<T[]>> getArrayBlocks(final String objectPath,
+            final HDF5CompoundType<T> type, final IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(objectPath));
+
+        return primGetCompoundArrayNaturalBlocks(objectPath, type, params, inspectorOrNull);
+    }
+
+    private <T> Iterable<HDF5DataBlock<T[]>> primGetCompoundArrayNaturalBlocks(
+            final String objectPath, final HDF5CompoundType<T> type,
+            final HDF5NaturalBlock1DParameters params, final IByteArrayInspector inspectorOrNull)
+    {
+        return new Iterable<HDF5DataBlock<T[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<T[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<T[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<T[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final T[] block =
+                                        readArrayBlockWithOffset(objectPath, type,
+                                                index.getBlockSize(), offset, inspectorOrNull);
+                                return new HDF5DataBlock<T[]>(block, index.getAndIncIndex(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public <T> Iterable<HDF5DataBlock<T[]>> getArrayBlocks(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(objectPath));
+
+        final HDF5CompoundType<T> dataSetCompoundType = getDataSetType(objectPath, pojoClass);
+        dataSetCompoundType.checkMappingComplete();
+        return primGetCompoundArrayNaturalBlocks(objectPath, dataSetCompoundType, params, null);
+    }
+
+    private <T> T primReadCompound(final String objectPath, final int blockSize, final long offset,
+            final HDF5CompoundType<T> type, final IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        final ICallableWithCleanUp<T> readRunnable = new ICallableWithCleanUp<T>()
+            {
+                @Override
+                public T call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int storageDataTypeId =
+                            baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                    checkCompoundType(storageDataTypeId, objectPath, type);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final int nativeDataTypeId = type.getNativeTypeId();
+                    final byte[] byteArr =
+                            new byte[spaceParams.blockSize
+                                    * type.getObjectByteifyer().getRecordSizeInMemory()];
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, byteArr);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArr);
+                    }
+                    final T scalar = type.getObjectByteifyer().arrayifyScalar(storageDataTypeId, byteArr,
+                            type.getCompoundType());
+                    baseReader.h5.reclaimCompoundVL(type, byteArr);
+                    return scalar;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    private <T> T[] primReadCompoundArray(final String objectPath, final int blockSize,
+            final long offset, final HDF5CompoundType<T> type,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        final ICallableWithCleanUp<T[]> readRunnable = new ICallableWithCleanUp<T[]>()
+            {
+                @Override
+                public T[] call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int storageDataTypeId =
+                            baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                    checkCompoundType(storageDataTypeId, objectPath, type);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final int nativeDataTypeId = type.getNativeTypeId();
+                    final byte[] byteArr =
+                            new byte[spaceParams.blockSize
+                                    * type.getObjectByteifyer().getRecordSizeInMemory()];
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, byteArr);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArr);
+                    }
+                    final T[] array = type.getObjectByteifyer().arrayify(storageDataTypeId, byteArr,
+                            type.getCompoundType());
+                    baseReader.h5.reclaimCompoundVL(type, byteArr);
+                    return array;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    private void checkCompoundType(final int dataTypeId, final String path,
+            final HDF5CompoundType<?> type) throws HDF5JavaException
+    {
+        final boolean isCompound = (baseReader.h5.getClassType(dataTypeId) == H5T_COMPOUND);
+        if (isCompound == false)
+        {
+            throw new HDF5JavaException("Data set '" + path + "' is no compound.");
+        }
+        if (type.isRequireTypesToBeEqual())
+        {
+            final boolean isEqual =
+                    (baseReader.h5.dataTypesAreEqual(dataTypeId, type.getStorageTypeId()));
+            if (isEqual == false)
+            {
+                throw new HDF5JavaException("The compound type '" + type.getName()
+                        + "' does not equal the compound type of data set '" + path + "'.");
+            }
+        }
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArray(final String objectPath, final HDF5CompoundType<T> type)
+            throws HDF5JavaException
+    {
+        return readMDArrayBlockWithOffset(objectPath, type, null, null, null);
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        return readMDArrayBlockWithOffset(objectPath, type, null, null, inspectorOrNull);
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArray(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        final HDF5CompoundType<T> dataSetCompoundType = getDataSetType(objectPath, pojoClass);
+        dataSetCompoundType.checkMappingComplete();
+        return readMDArrayBlockWithOffset(objectPath, dataSetCompoundType, null, null, null);
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final int[] blockDimensions, final long[] blockNumber) throws HDF5JavaException
+    {
+        return readMDArrayBlock(objectPath, type, blockDimensions, blockNumber, null);
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final int[] blockDimensions, final long[] blockNumber,
+            final IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
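+        // Convert the block number (counted in whole blocks) into an element offset.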
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockDimensions[i] * blockNumber[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, type, blockDimensions, offset,
+                inspectorOrNull);
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final int[] blockDimensions, final long[] offset)
+            throws HDF5JavaException
+    {
+        return readMDArrayBlockWithOffset(objectPath, type, blockDimensions, offset, null);
+    }
+
+    @Override
+    public <T> MDArray<T> readMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final int[] dimensionsOrNull,
+            final long[] offsetOrNull, final IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        final ICallableWithCleanUp<MDArray<T>> readRunnable =
+                new ICallableWithCleanUp<MDArray<T>>()
+                    {
+                        @Override
+                        public MDArray<T> call(final ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final int storageDataTypeId =
+                                    baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                            checkCompoundType(storageDataTypeId, objectPath, type);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offsetOrNull,
+                                            dimensionsOrNull, registry);
+                            final int nativeDataTypeId = type.getNativeTypeId();
+                            final byte[] byteArr =
+                                    new byte[spaceParams.blockSize
+                                            * type.getObjectByteifyer().getRecordSizeInMemory()];
+                            baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId, byteArr);
+                            if (inspectorOrNull != null)
+                            {
+                                inspectorOrNull.inspect(byteArr);
+                            }
+                            final MDArray<T> array = new MDArray<T>(type.getObjectByteifyer().arrayify(
+                                    storageDataTypeId, byteArr, type.getCompoundType()),
+                                    spaceParams.dimensions);
+                            baseReader.h5.reclaimCompoundVL(type, byteArr);
+                            return array;
+                        }
+                    };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getMDArrayBlocks(final String objectPath,
+            final HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return getMDArrayBlocks(objectPath, type, null);
+    }
+
+    @Override
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getMDArrayBlocks(final String objectPath,
+            final HDF5CompoundType<T> type, final IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(objectPath,
+                        DataTypeInfoOptions.MINIMAL, true));
+
+        return primGetCompoundMDArrayNaturalBlocks(objectPath, type, params, inspectorOrNull);
+    }
+
+    private <T> Iterable<HDF5MDDataBlock<MDArray<T>>> primGetCompoundMDArrayNaturalBlocks(
+            final String objectPath, final HDF5CompoundType<T> type,
+            final HDF5NaturalBlockMDParameters params, final IByteArrayInspector inspectorOrNull)
+    {
+        return new Iterable<HDF5MDDataBlock<MDArray<T>>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDArray<T>>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDArray<T>>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDArray<T>> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDArray<T> block =
+                                        readMDArrayBlockWithOffset(objectPath, type,
+                                                index.getBlockSize(), offset, inspectorOrNull);
+                                return new HDF5MDDataBlock<MDArray<T>>(block,
+                                        index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getMDArrayBlocks(String objectPath,
+            Class<T> pojoClass) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(objectPath));
+
+        final HDF5CompoundType<T> dataSetCompoundType = getDataSetType(objectPath, pojoClass);
+        dataSetCompoundType.checkMappingComplete();
+        return primGetCompoundMDArrayNaturalBlocks(objectPath, dataSetCompoundType, params, null);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundType.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundType.java
new file mode 100644
index 0000000..02864b0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundType.java
@@ -0,0 +1,371 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import org.apache.commons.lang.ArrayUtils;
+
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+
+/**
+ * The definition of a HDF5 compound type. For information on how to create and work with compound
+ * types, have a look at {@link IHDF5CompoundInformationRetriever}. The simplest way of creating a
+ * compound type for a Java class is
+ * {@link IHDF5CompoundInformationRetriever#getInferredType(Class)}.
+ * <p>
+ * Once you have a compound type, you may use methods like
+ * {@link IHDF5CompoundReader#read(String, HDF5CompoundType)} and
+ * {@link IHDF5CompoundWriter#write(String, HDF5CompoundType, Object)} to read and write compound
+ * values.
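+ * <p>
+ * A minimal usage sketch (the file name, data set path, and <code>MyRecord</code> class are
+ * hypothetical):
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+ * HDF5CompoundType&lt;MyRecord&gt; type = reader.compound().getInferredType(MyRecord.class);
+ * MyRecord record = reader.compound().read("/group/records", type);
+ * reader.close();
+ * </pre>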
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5CompoundType<T> extends HDF5DataType
+{
+    interface IHDF5InternalCompoundMemberInformationRetriever
+    {
+        HDF5CompoundMemberInformation[] getCompoundMemberInformation(
+                final DataTypeInfoOptions dataTypeInfoOptions);
+    }
+
+    private final String nameOrNull;
+
+    private final Class<T> compoundType;
+
+    private final boolean mapAllFields;
+
+    private final HDF5ValueObjectByteifyer<T> objectByteifyer;
+
+    private final IHDF5InternalCompoundMemberInformationRetriever informationRetriever;
+
+    private final boolean requireTypesToBeEqual;
+
+    /**
+     * Creates a new {@link HDF5CompoundType} for the given <var>compoundType</var> and the mapping
+     * defined by <var>members</var>.
+     * 
+     * @param fileId The id of the HDF5 file that this type belongs to.
+     * @param nameOrNull The name of this type, or <code>null</code>, if it is not known.
+     * @param storageTypeId The storage data type id.
+     * @param nativeTypeId The native (memory) data type id.
+     * @param compoundType The Java type that corresponds to this type.
+     * @param requireEqualsType If <code>true</code>, check that this type is equal to the type it
+     *            is used to read.
+     * @param objectByteifer The byteifyer to use to convert between the Java object and the HDF5
+     *            file.
+     * @param informationRetriever A role that allows to retrieve compound member information for a
+     *            given compound type id.
+     * @param baseReader The base reader that this type was derived from.
+     */
+    HDF5CompoundType(int fileId, int storageTypeId, int nativeTypeId, String nameOrNull,
+            Class<T> compoundType, boolean requireEqualsType,
+            HDF5ValueObjectByteifyer<T> objectByteifer,
+            IHDF5InternalCompoundMemberInformationRetriever informationRetriever,
+            HDF5BaseReader baseReader)
+    {
+        super(fileId, storageTypeId, nativeTypeId, baseReader);
+        assert compoundType != null;
+        assert objectByteifer != null;
+        assert informationRetriever != null;
+
+        this.nameOrNull = nameOrNull;
+        this.compoundType = compoundType;
+        final CompoundType ct = compoundType.getAnnotation(CompoundType.class);
+        this.requireTypesToBeEqual = requireEqualsType;
+        this.mapAllFields = (ct == null) || ct.mapAllFields();
+        this.objectByteifyer = objectByteifer;
+        this.informationRetriever = informationRetriever;
+    }
+
+    /**
+     * Returns the Java type of the compound.
+     */
+    public Class<T> getCompoundType()
+    {
+        return compoundType;
+    }
+
+    /**
+     * Returns the size of the record on disk (in bytes).
+     * 
+     * @deprecated Use {@link #getRecordSizeOnDisk()} instead.
+     */
+    @Deprecated
+    public int getRecordSize()
+    {
+        return getRecordSizeOnDisk();
+    }
+
+    /**
+     * Returns the size of the record on disk (in bytes).
+     */
+    public int getRecordSizeOnDisk()
+    {
+        return objectByteifyer.getRecordSizeOnDisk();
+    }
+
+    /**
+     * Returns the size of the record in memory (in bytes).
+     */
+    public int getRecordSizeInMemory()
+    {
+        return objectByteifyer.getRecordSizeInMemory();
+    }
+
+    /**
+     * Returns an array with the {@link HDF5CompoundMemberInformation} of all compound members.
+     */
+    public HDF5CompoundMemberInformation[] getCompoundMemberInformation()
+    {
+        return getCompoundMemberInformation(DataTypeInfoOptions.DEFAULT);
+    }
+
+    /**
+     * Returns an array with the {@link HDF5CompoundMemberInformation} of all compound members.
+     */
+    public HDF5CompoundMemberInformation[] getCompoundMemberInformation(
+            final DataTypeInfoOptions options)
+    {
+        return informationRetriever.getCompoundMemberInformation(options);
+    }
+
+    /**
+     * Returns <code>true</code>, if the mapping between the in-memory and the on-disk
+     * representation is incomplete, that is if either {@link #isDiskRepresentationIncomplete()} or
+     * {@link #isMemoryRepresentationIncomplete()} returns <code>true</code>.
+     */
+    public boolean isMappingIncomplete()
+    {
+        return isMemoryRepresentationIncomplete() || isDiskRepresentationIncomplete();
+    }
+
+    /**
+     * Returns <code>true</code> if there are compound members in the on-disk representation that
+     * are not mapped to fields in the in-memory representation.
+     */
+    public boolean isMemoryRepresentationIncomplete()
+    {
+        return objectByteifyer.hasUnmappedMembers();
+    }
+
+    /**
+     * Returns <code>true</code>, if this type is expected to be equal to the type of a data set it
+     * is used to read.
+     */
+    public boolean isRequireTypesToBeEqual()
+    {
+        return requireTypesToBeEqual;
+    }
+
+    /**
+     * Returns an array with the names of compound members that are not mapped to the in-memory
+     * representation. If no members are unmapped, an empty array is returned.
+     */
+    public String[] getUnmappedCompoundMemberNames()
+    {
+        return objectByteifyer.getUnmappedMembers();
+    }
+
+    private Map<String, HDF5CompoundMemberInformation> getCompoundMemberInformationMap()
+    {
+        final Map<String, HDF5CompoundMemberInformation> result =
+                new HashMap<String, HDF5CompoundMemberInformation>();
+        for (HDF5CompoundMemberInformation info : getCompoundMemberInformation())
+        {
+            result.put(info.getName(), info);
+        }
+        return result;
+    }
+
+    /**
+     * Returns an array with the {@link HDF5CompoundMemberInformation} of compound members that are not
+     * mapped to the in-memory representation. If no members are unmapped, an empty array is
+     * returned.
+     */
+    public HDF5CompoundMemberInformation[] getUnmappedCompoundMemberInformation()
+    {
+        final String[] unmappedCompoundMemberNames = getUnmappedCompoundMemberNames();
+        if (unmappedCompoundMemberNames.length > 0)
+        {
+            final Map<String, HDF5CompoundMemberInformation> compoundMemberInfoMap =
+                    getCompoundMemberInformationMap();
+            final HDF5CompoundMemberInformation[] result =
+                    new HDF5CompoundMemberInformation[unmappedCompoundMemberNames.length];
+            int idx = 0;
+            for (String name : unmappedCompoundMemberNames)
+            {
+                result[idx++] = compoundMemberInfoMap.get(name);
+            }
+            return result;
+        } else
+        {
+            return new HDF5CompoundMemberInformation[0];
+        }
+    }
+
+    /**
+     * Returns <code>true</code> if there are fields in the in-memory representation that are not
+     * mapped to any compound member in the on-disk representation.
+     */
+    public boolean isDiskRepresentationIncomplete()
+    {
+        return getUnmappedFields().isEmpty() == false;
+    }
+
+    /**
+     * Checks whether the mapping between the on-disk representation and the in-memory
+     * representation is complete.
+     * 
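+     * A minimal usage sketch (the reader, the path and the record class are illustrative):
+     * 
+     * <pre>
+     * HDF5CompoundType<MyRecord> type = reader.compound().getInferredType(MyRecord.class);
+     * type.checkMappingComplete(); // throws HDF5JavaException on unmapped fields or members
+     * MyRecord record = reader.compound().read("/group/record", type);
+     * </pre>
+     * 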
+     * @throws HDF5JavaException if {@link #isMappingIncomplete()} returns <code>true</code>.
+     */
+    public void checkMappingComplete() throws HDF5JavaException
+    {
+        final String[] unmappedMembers = getUnmappedCompoundMemberNames();
+        final String[] unmappedFields = getUnmappedFieldNames();
+        if ((unmappedMembers.length > 0 && mapAllFields) || unmappedFields.length > 0)
+        {
+            final StringBuilder b = new StringBuilder();
+            b.append("Incomplete mapping for compound type '");
+            b.append(getName());
+            b.append("': ");
+            if (unmappedMembers.length > 0)
+            {
+                b.append("unmapped members: ");
+                b.append(ArrayUtils.toString(unmappedMembers));
+            }
+            if (unmappedMembers.length > 0 && unmappedFields.length > 0)
+            {
+                b.append(", ");
+            }
+            if (unmappedFields.length > 0)
+            {
+                b.append("unmapped fields: ");
+                b.append(ArrayUtils.toString(unmappedFields));
+            }
+            throw new HDF5JavaException(b.toString());
+        }
+    }
+
+    /**
+     * Returns an array with names of fields of the in-memory representation that do not map to any
+     * compound member in the on-disk representation.
+     */
+    public String[] getUnmappedFieldNames()
+    {
+        final Set<Field> unmappedFields = getUnmappedFields();
+        final String[] result = new String[unmappedFields.size()];
+        int idx = 0;
+        for (Field field : unmappedFields)
+        {
+            result[idx++] = field.getName();
+        }
+        return result;
+    }
+
+    private Set<Field> getUnmappedFields()
+    {
+        if (Map.class.isAssignableFrom(compoundType) || List.class.isAssignableFrom(compoundType)
+                || compoundType == Object[].class)
+        {
+            return Collections.emptySet();
+        } else
+        {
+            final Set<Field> fieldSet =
+                    new HashSet<Field>(ReflectionUtils.getFieldMap(compoundType, false).values());
+            // If the compound type is annotated with @CompoundType(mapAllFields = false)
+            // then remove all fields that do not have a @CompoundElement annotation
+            if (mapAllFields == false)
+            {
+                final Iterator<Field> it = fieldSet.iterator();
+                while (it.hasNext())
+                {
+                    final Field f = it.next();
+                    final CompoundElement ce = f.getAnnotation(CompoundElement.class);
+                    if (ce == null)
+                    {
+                        it.remove();
+                    }
+                }
+            }
+            for (HDF5MemberByteifyer byteifyer : objectByteifyer.getByteifyers())
+            {
+                fieldSet.remove(byteifyer.tryGetField());
+            }
+            return fieldSet;
+        }
+    }
+
+    /**
+     * Returns the byteifyer to convert between the Java type and the HDF5 type.
+     */
+    HDF5ValueObjectByteifyer<T> getObjectByteifyer()
+    {
+        return objectByteifyer;
+    }
+
+    @Override
+    public String tryGetName()
+    {
+        return nameOrNull;
+    }
+
+    /**
+     * Returns the map of member names to enumeration types (only enum members will have an entry in
+     * the map).
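+     * <p>
+     * For example (the member name "status" is hypothetical):
+     * 
+     * <pre>
+     * HDF5EnumerationType statusType = type.getEnumTypeMap().get("status");
+     * </pre>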
+     */
+    public Map<String, HDF5EnumerationType> getEnumTypeMap()
+    {
+        final HDF5MemberByteifyer[] byteifyers = objectByteifyer.getByteifyers();
+        final Map<String, HDF5EnumerationType> result =
+                new LinkedHashMap<String, HDF5EnumerationType>();
+        int idx = 0;
+        for (HDF5CompoundMemberInformation info : getCompoundMemberInformation(DataTypeInfoOptions.MINIMAL))
+        {
+            if (info.getType().getDataClass() == HDF5DataClass.ENUM)
+            {
+                result.put(info.getName(), byteifyers[idx].tryGetEnumType());
+            }
+            ++idx;
+        }
+        return result;
+    }
+
+    @Override
+    public String toString()
+    {
+        if (nameOrNull != null)
+        {
+            return "HDF5CompoundType [nameOrNull=" + nameOrNull + ", compoundType="
+                    + compoundType.getSimpleName() + ", objectByteifyer=" + objectByteifyer + "]";
+        } else
+        {
+            return "HDF5CompoundType [compoundType=" + compoundType.getSimpleName()
+                    + ", objectByteifyer=" + objectByteifyer + "]";
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundWriter.java
new file mode 100644
index 0000000..e43a0ae
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5CompoundWriter.java
@@ -0,0 +1,993 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+
+import java.util.List;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5CompoundWriter}.
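+ * <p>
+ * A minimal usage sketch (the writer, the path and the record class are illustrative):
+ * 
+ * <pre>
+ * writer.compound().write("/group/record", new MyRecord(...));
+ * </pre>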
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5CompoundWriter extends HDF5CompoundReader implements IHDF5CompoundWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5CompoundWriter(HDF5BaseWriter baseWriter, IHDF5EnumWriter enumWriter)
+    {
+        super(baseWriter, enumWriter);
+        this.baseWriter = baseWriter;
+    }
+
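+    /**
+     * Creates a {@link HDF5CompoundType} for <var>pojoClass</var> from the given member mappings.
+     * Unless <var>anonymousType</var> is set, the storage data type is committed under
+     * <var>nameOrNull</var>, or under a name derived from <var>pojoClass</var> if no name is
+     * given.
+     */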
+    private <T> HDF5CompoundType<T> getType(final String nameOrNull, final boolean anonymousType,
+            Class<T> pojoClass, final boolean requireEqualsType,
+            HDF5CompoundMemberMapping... members)
+    {
+        baseWriter.checkOpen();
+        final HDF5ValueObjectByteifyer<T> objectByteifyer =
+                baseWriter.createCompoundByteifyers(pojoClass, members, null);
+        final String dataTypeName =
+                anonymousType ? null : (nameOrNull != null) ? nameOrNull
+                        : deriveCompoundNameFromClass(pojoClass);
+        final int storageDataTypeId =
+                getOrCreateCompoundDataType(dataTypeName, objectByteifyer,
+                        baseWriter.keepDataSetIfExists);
+        final int nativeDataTypeId = baseWriter.createNativeCompoundDataType(objectByteifyer);
+        return new HDF5CompoundType<T>(baseWriter.fileId, storageDataTypeId, nativeDataTypeId,
+                dataTypeName, pojoClass, requireEqualsType, objectByteifyer,
+                new HDF5CompoundType.IHDF5InternalCompoundMemberInformationRetriever()
+                    {
+                        @Override
+                        public HDF5CompoundMemberInformation[] getCompoundMemberInformation(
+                                final DataTypeInfoOptions dataTypeOptions)
+                        {
+                            return HDF5CompoundWriter.this.getCompoundMemberInformation(
+                                    storageDataTypeId, nameOrNull, dataTypeOptions);
+                        }
+                    }, baseReader);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getType(final String name, final Class<T> pojoClass,
+            boolean requireTypesToBeEqual, final HDF5CompoundMemberMapping... members)
+    {
+        return getType(name, false, pojoClass, requireTypesToBeEqual, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getType(final String name, Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members)
+    {
+        return getType(name, false, pojoClass, true, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getAnonType(Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members)
+    {
+        return getType(null, true, pojoClass, true, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredAnonType(Class<T> pojoClass,
+            HDF5CompoundMappingHints hints)
+    {
+        return getType(
+                null,
+                true,
+                pojoClass,
+                true,
+                addEnumTypes(HDF5CompoundMemberMapping.addHints(
+                        HDF5CompoundMemberMapping.inferMapping(pojoClass), hints)));
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredAnonType(Class<T> pojoClass)
+    {
+        return getInferredAnonType(pojoClass, null);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredAnonType(T template)
+    {
+        return getInferredAnonType(template, null);
+    }
+
+    @Override
+    @SuppressWarnings(
+        { "unchecked", "rawtypes" })
+    public <T> HDF5CompoundType<T> getInferredAnonType(T pojo, HDF5CompoundMappingHints hints)
+    {
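+        // A Map template gets its compound mapping inferred from its keys and values; any other
+        // POJO is mapped via reflection on its class.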
+        if (Map.class.isInstance(pojo))
+        {
+            return (HDF5CompoundType<T>) getType(
+                    null,
+                    true,
+                    Map.class,
+                    true,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(
+                            HDF5CompoundMemberMapping.inferMapping((Map) pojo), hints)));
+        } else
+        {
+            final Class<T> pojoClass = (Class<T>) pojo.getClass();
+            return getType(null, true, pojoClass, true,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(HDF5CompoundMemberMapping
+                            .inferMapping(pojo, HDF5CompoundMemberMapping.inferEnumerationTypeMap(
+                                    pojo, enumTypeRetriever), HDF5CompoundMappingHints
+                                    .isUseVariableLengthStrings(hints)), hints)));
+        }
+    }
+
+    private <T> HDF5CompoundType<T> getType(final String name, final boolean anonymousType,
+            final HDF5CompoundType<T> templateType)
+    {
+        baseWriter.checkOpen();
+        templateType.checkOpen();
+        final HDF5ValueObjectByteifyer<T> objectByteifyer = templateType.getObjectByteifyer();
+        final String dataTypeName =
+                anonymousType ? null : (name == null) ? templateType.getName() : name;
+        final int storageDataTypeId =
+                getOrCreateCompoundDataType(dataTypeName, objectByteifyer,
+                        baseWriter.keepDataSetIfExists);
+        return getType(dataTypeName, storageDataTypeId, templateType.getCompoundType(), true,
+                objectByteifyer);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredAnonType(T[] template)
+    {
+        return getInferredAnonType(template, null);
+    }
+
+    @Override
+    @SuppressWarnings(
+        { "unchecked", "rawtypes" })
+    public <T> HDF5CompoundType<T> getInferredAnonType(T[] template, HDF5CompoundMappingHints hints)
+    {
+        final Class<?> componentType = template.getClass().getComponentType();
+        if (template.length == 0)
+        {
+            return (HDF5CompoundType<T>) getInferredAnonType(componentType, hints);
+        }
+        if (Map.class.isAssignableFrom(componentType))
+        {
+            return (HDF5CompoundType<T>) getType(
+                    null,
+                    true,
+                    Map.class,
+                    true,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(
+                            HDF5CompoundMemberMapping.inferMapping((Map) template[0]), hints)));
+        } else
+        {
+            return (HDF5CompoundType<T>) getType(null, true, componentType, true,
+                    addEnumTypes(HDF5CompoundMemberMapping.addHints(HDF5CompoundMemberMapping
+                            .inferMapping(template, HDF5CompoundMemberMapping
+                                    .inferEnumerationTypeMap(template, enumTypeRetriever),
+                                    hints == null ? false : hints.isUseVariableLengthStrings()),
+                            hints)));
+        }
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public HDF5CompoundType<List<?>> getInferredAnonType(List<String> memberNames,
+            List<?> template, HDF5CompoundMappingHints hints)
+    {
+        final HDF5CompoundType<?> type =
+                getType(null, true, List.class, true, HDF5CompoundMemberMapping.addHints(
+                        HDF5CompoundMemberMapping.inferMapping(memberNames, template), hints));
+        return (HDF5CompoundType<List<?>>) type;
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredAnonType(List<String> memberNames, List<?> template)
+    {
+        return getInferredAnonType(memberNames, template, null);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredAnonType(String[] memberNames, Object[] template)
+    {
+        return getInferredAnonType(memberNames, template, null);
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public HDF5CompoundType<Object[]> getInferredAnonType(String[] memberNames, Object[] template,
+            HDF5CompoundMappingHints hints)
+    {
+        final HDF5CompoundType<?> type =
+                getType(null, true, List.class, true, HDF5CompoundMemberMapping.addHints(
+                        HDF5CompoundMemberMapping.inferMapping(memberNames, template), hints));
+        return (HDF5CompoundType<Object[]>) type;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getClonedType(final HDF5CompoundType<T> templateType)
+    {
+        return getType(null, false, templateType);
+    }
+
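+    /**
+     * Returns the compound type name for <var>pojoClass</var>: the name given in the
+     * {@link CompoundType} annotation, if present and non-empty, and the simple class name
+     * otherwise.
+     */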
+    private <T> String deriveCompoundNameFromClass(Class<T> pojoClass)
+    {
+        final CompoundType ct = pojoClass.getAnnotation(CompoundType.class);
+        final String name = (ct != null) ? ct.name() : "";
+        return name.length() == 0 ? pojoClass.getSimpleName() : name;
+    }
+
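+    /**
+     * Returns the id of the committed compound data type of the given name, creating and
+     * committing it first if no equal type of that name exists. If a type of that name exists but
+     * differs and <var>committedDataTypeHasPreference</var> is <code>false</code>, the existing
+     * type is moved out of the way before the new type is committed under the name.
+     */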
+    private <T> int getOrCreateCompoundDataType(final String dataTypeName,
+            final HDF5ValueObjectByteifyer<T> objectByteifyer,
+            boolean committedDataTypeHasPreference)
+    {
+        final boolean dataTypeNameGiven =
+                (dataTypeName != null && "UNKNOWN".equals(dataTypeName) == false);
+        final String dataTypePath =
+                dataTypeNameGiven ? HDF5Utils.createDataTypePath(HDF5Utils.COMPOUND_PREFIX,
+                        baseWriter.houseKeepingNameSuffix, dataTypeName) : null;
+        final int committedStorageDataTypeId =
+                dataTypeNameGiven ? baseWriter.getDataTypeId(dataTypePath) : -1;
+        final boolean typeExists = (committedStorageDataTypeId >= 0);
+        int storageDataTypeId = committedStorageDataTypeId;
+        final boolean commitType;
+        if ((typeExists == false) || (committedDataTypeHasPreference == false))
+        {
+            storageDataTypeId = baseWriter.createStorageCompoundDataType(objectByteifyer);
+            final boolean typesAreEqual =
+                    typeExists
+                            && baseWriter.h5.dataTypesAreEqual(committedStorageDataTypeId,
+                                    storageDataTypeId);
+            commitType = dataTypeNameGiven && ((typeExists == false) || (typesAreEqual == false));
+            if (typeExists && commitType)
+            {
+                final String replacementDataTypePath = baseWriter.moveLinkOutOfTheWay(dataTypePath);
+                baseReader.renameNamedDataType(dataTypePath, replacementDataTypePath);
+            }
+            if (typesAreEqual)
+            {
+                storageDataTypeId = committedStorageDataTypeId;
+            }
+        } else
+        {
+            commitType = false;
+        }
+        if (commitType)
+        {
+            baseWriter.commitDataType(dataTypePath, storageDataTypeId);
+            final HDF5EnumerationValueArray typeVariants =
+                    tryCreateDataTypeVariantArray(objectByteifyer);
+            if (typeVariants != null)
+            {
+                baseWriter.setEnumArrayAttribute(dataTypePath, HDF5Utils
+                        .getTypeVariantMembersAttributeName(baseWriter.houseKeepingNameSuffix),
+                        typeVariants);
+            }
+        }
+        return storageDataTypeId;
+    }
+
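+    /**
+     * Returns an enumeration value array with the type variants of all members of
+     * <var>objectByteifyer</var>, or <code>null</code> if no member has a type variant.
+     */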
+    private <T> HDF5EnumerationValueArray tryCreateDataTypeVariantArray(
+            final HDF5ValueObjectByteifyer<T> objectByteifyer)
+    {
+        final byte[] typeVariantOrdinals = new byte[objectByteifyer.getByteifyers().length];
+        boolean hasTypeVariants = false;
+        for (int i = 0; i < typeVariantOrdinals.length; ++i)
+        {
+            typeVariantOrdinals[i] =
+                    (byte) objectByteifyer.getByteifyers()[i].getTypeVariant().ordinal();
+            hasTypeVariants |= HDF5DataTypeVariant.isTypeVariant(typeVariantOrdinals[i]);
+        }
+        return hasTypeVariants ? new HDF5EnumerationValueArray(baseWriter.typeVariantDataType,
+                typeVariantOrdinals) : null;
+    }
+
+    @Override
+    public <T> void setAttr(final String objectPath, final String attributeName,
+            final HDF5CompoundType<T> type, final T data)
+    {
+        primSetCompoundAttribute(objectPath, attributeName, type, data, null);
+    }
+
+    @Override
+    public <T> void setAttr(final String objectPath, final String attributeName, final T data)
+    {
+        final HDF5CompoundType<T> inferredCompoundType = getInferredType(data);
+        inferredCompoundType.checkMappingComplete();
+        primSetCompoundAttribute(objectPath, attributeName, inferredCompoundType, data, null);
+    }
+
+    @Override
+    public <T> void setArrayAttr(String objectPath, String attributeName, HDF5CompoundType<T> type,
+            T[] value)
+    {
+        baseWriter.setCompoundArrayAttribute(objectPath, attributeName, type, value, null);
+    }
+
+    @Override
+    public <T> void setArrayAttr(String objectPath, String attributeName, T[] value)
+    {
+        @SuppressWarnings("unchecked")
+        final HDF5CompoundType<T> inferredCompoundType =
+                getInferredType((Class<T>) value.getClass().getComponentType());
+        inferredCompoundType.checkMappingComplete();
+        baseWriter.setCompoundArrayAttribute(objectPath, attributeName, inferredCompoundType,
+                value, null);
+    }
+
+    @Override
+    public <T> void setMDArrayAttr(String objectPath, String attributeName,
+            HDF5CompoundType<T> type, MDArray<T> value)
+    {
+        baseWriter.setCompoundMDArrayAttribute(objectPath, attributeName, type, value, null);
+    }
+
+    @Override
+    public <T> void setMDArrayAttr(String objectPath, String attributeName, MDArray<T> value)
+    {
+        @SuppressWarnings("unchecked")
+        final HDF5CompoundType<T> inferredCompoundType =
+                getInferredType((Class<T>) value.getAsFlatArray().getClass().getComponentType());
+        inferredCompoundType.checkMappingComplete();
+        baseWriter.setCompoundMDArrayAttribute(objectPath, attributeName, inferredCompoundType,
+                value, null);
+    }
+
+    private <T> void primSetCompoundAttribute(final String objectPath, final String attributeName,
+            final HDF5CompoundType<?> type, final T data, final IByteArrayInspector inspectorOrNull)
+    {
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        @SuppressWarnings("unchecked")
+        final byte[] byteArray =
+                ((HDF5CompoundType<T>) type).getObjectByteifyer().byteify(type.getStorageTypeId(),
+                        data);
+        if (inspectorOrNull != null)
+        {
+            inspectorOrNull.inspect(byteArray);
+        }
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
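+                                // Attach the attribute with a rank-1 data space of length 1
+                                // rather than with a scalar data space.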
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, attributeName,
+                                        type.getStorageTypeId(), type.getNativeTypeId(),
+                                        dataSpaceId, byteArray, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, attributeName,
+                                        type.getStorageTypeId(), type.getNativeTypeId(), -1,
+                                        byteArray, registry);
+                            }
+                            baseWriter.h5.reclaimCompoundVL(type, byteArray);
+
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public <T> void write(final String objectPath, final HDF5CompoundType<T> type, final T data)
+    {
+        primWriteCompound(objectPath, type, data, null);
+    }
+
+    @Override
+    public <T> void write(final String objectPath, final HDF5CompoundType<T> type, final T data,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        primWriteCompound(objectPath, type, data, inspectorOrNull);
+    }
+
+    private <T> void primWriteCompound(final String objectPath, final HDF5CompoundType<?> type,
+            final T data, final IByteArrayInspector inspectorOrNull)
+    {
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        @SuppressWarnings("unchecked")
+        final byte[] byteArray =
+                ((HDF5CompoundType<T>) type).getObjectByteifyer().byteify(type.getStorageTypeId(),
+                        data);
+        if (inspectorOrNull != null)
+        {
+            inspectorOrNull.inspect(byteArray);
+        }
+        baseWriter.writeScalar(objectPath, type.getStorageTypeId(), type.getNativeTypeId(),
+                byteArray);
+        baseWriter.h5.reclaimCompoundVL(type, byteArray);
+    }
+
+    @Override
+    public <T> void write(String objectPath, T data)
+    {
+        final HDF5CompoundType<T> inferredCompoundType = getInferredType(data);
+        inferredCompoundType.checkMappingComplete();
+        primWriteCompound(objectPath, inferredCompoundType, data, null);
+    }
+
+    @Override
+    public <T> void writeArray(final String objectPath, final HDF5CompoundType<T> type,
+            final T[] data)
+    {
+        primWriteCompoundArray(objectPath, type, data,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION, null);
+    }
+
+    @Override
+    public <T> void writeArray(final String objectPath, final HDF5CompoundType<T> type,
+            final T[] data, final HDF5GenericStorageFeatures features)
+    {
+        primWriteCompoundArray(objectPath, type, data, features, null);
+    }
+
+    @Override
+    public <T> void writeArray(final String objectPath, final HDF5CompoundType<T> type,
+            final T[] data, final HDF5GenericStorageFeatures features,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        primWriteCompoundArray(objectPath, type, data, features, inspectorOrNull);
+    }
+
+    private <T> void primWriteCompoundArray(final String objectPath,
+            final HDF5CompoundType<?> type, final T[] data,
+            final HDF5GenericStorageFeatures features, final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, type.getStorageTypeId(),
+                                    new long[]
+                                        { data.length }, type.getObjectByteifyer()
+                                            .getRecordSizeOnDisk(), features, registry);
+                    @SuppressWarnings("unchecked")
+                    final byte[] byteArray =
+                            ((HDF5CompoundType<T>) type).getObjectByteifyer().byteify(
+                                    type.getStorageTypeId(), data);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    H5Dwrite(dataSetId, type.getNativeTypeId(), H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            byteArray);
+                    baseWriter.h5.reclaimCompoundVL(type, byteArray);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void writeArray(String objectPath, T[] data)
+    {
+        writeArray(objectPath, data, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void writeArray(String objectPath, T[] data, HDF5GenericStorageFeatures features)
+    {
+        assert data != null && data.length > 0;
+
+        final HDF5CompoundType<T> inferredCompoundType = getInferredType(data);
+        inferredCompoundType.checkMappingComplete();
+        primWriteCompoundArray(objectPath, inferredCompoundType, data, features, null);
+    }
+
+    @Override
+    public <T> void writeArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final T[] data, final long blockNumber)
+    {
+        writeArrayBlock(objectPath, type, data, blockNumber, null);
+    }
+
+    @Override
+    public <T> void writeArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final T[] data, final long blockNumber, final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert data != null;
+        assert blockNumber >= 0;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
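+                    // Block numbering assumes a uniform block size: block i starts at offset
+                    // i * data.length, and the data set is extended to cover blocks 0 through i.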
+                    final long size = data.length;
+                    final long[] dimensions = new long[]
+                        { size };
+                    final long[] offset = new long[]
+                        { size * blockNumber };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { data.length * (blockNumber + 1) }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(), data);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    H5Dwrite(dataSetId, type.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, byteArray);
+                    baseWriter.h5.reclaimCompoundVL(type, byteArray);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void writeArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final T[] data, final long offset)
+    {
+        writeArrayBlockWithOffset(objectPath, type, data, offset, null);
+    }
+
+    @Override
+    public <T> void writeArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final T[] data, final long offset,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert data != null;
+        assert offset >= 0;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final long size = data.length;
+        final long[] dimensions = new long[]
+            { size };
+        final long[] offsetArray = new long[]
+            { offset };
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + data.length }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offsetArray, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(), data);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    H5Dwrite(dataSetId, type.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, byteArray);
+                    baseWriter.h5.reclaimCompoundVL(type, byteArray);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void createArray(String objectPath, HDF5CompoundType<T> type, int size)
+    {
+        createArray(objectPath, type, size, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void createArray(final String objectPath, final HDF5CompoundType<T> type,
+            final long size, final int blockSize)
+    {
+        createArray(objectPath, type, size, blockSize,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void createArray(final String objectPath, final HDF5CompoundType<T> type,
+            final long size, final int blockSize, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, type.getStorageTypeId(), features,
+                            new long[]
+                                { size }, new long[]
+                                { blockSize }, type.getObjectByteifyer().getRecordSizeOnDisk(),
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void createArray(final String objectPath, final HDF5CompoundType<T> type,
+            final long size, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, type.getStorageTypeId(), features,
+                                new long[]
+                                    { 0 }, new long[]
+                                    { size }, type.getObjectByteifyer().getRecordSizeOnDisk(),
+                                registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, type.getStorageTypeId(), features,
+                                new long[]
+                                    { size }, null,
+                                type.getObjectByteifyer().getRecordSizeOnDisk(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void writeMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final MDArray<T> data)
+    {
+        writeMDArray(objectPath, type, data, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void writeMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final MDArray<T> data, final HDF5GenericStorageFeatures features)
+    {
+        writeMDArray(objectPath, type, data, features, null);
+    }
+
+    @Override
+    public <T> void writeMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final MDArray<T> data, final HDF5GenericStorageFeatures features,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        primWriteCompoundMDArray(objectPath, type, data, features, inspectorOrNull);
+    }
+
+    private <T> void primWriteCompoundMDArray(final String objectPath,
+            final HDF5CompoundType<T> type, final MDArray<T> data,
+            final HDF5GenericStorageFeatures features, final IByteArrayInspector inspectorOrNull)
+    {
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, type.getStorageTypeId(),
+                                    MDAbstractArray.toLong(data.dimensions()), type
+                                            .getObjectByteifyer().getRecordSizeOnDisk(), features,
+                                    registry);
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(),
+                                    data.getAsFlatArray());
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    H5Dwrite(dataSetId, type.getNativeTypeId(), H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            byteArray);
+                    baseWriter.h5.reclaimCompoundVL(type, byteArray);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void writeMDArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final MDArray<T> data, final long[] blockNumber)
+    {
+        writeMDArrayBlock(objectPath, type, data, blockNumber, null);
+    }
+
+    @Override
+    public <T> void writeMDArrayBlock(final String objectPath, final HDF5CompoundType<T> type,
+            final MDArray<T> data, final long[] blockNumber,
+            final IByteArrayInspector inspectorOrNull)
+    {
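+        // Convert the per-dimension block number into an absolute element offset, assuming that
+        // the block extent in each dimension equals the dimensions of the given data block.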
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        final long[] dataSetDimensions = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+            dataSetDimensions[i] = offset[i] + dimensions[i];
+        }
+        writeCompoundMDArrayBlockWithOffset(objectPath, type, data.getAsFlatArray(), dimensions,
+                offset, dataSetDimensions, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final MDArray<T> data, final long[] offset)
+    {
+        writeMDArrayBlockWithOffset(objectPath, type, data, offset, null);
+    }
+
+    @Override
+    public <T> void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final MDArray<T> data, final long[] offset,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        final long[] dimensions = data.longDimensions();
+        final long[] dataSetDimensions = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            dataSetDimensions[i] = offset[i] + dimensions[i];
+        }
+        writeCompoundMDArrayBlockWithOffset(objectPath, type, data.getAsFlatArray(), dimensions,
+                offset, dataSetDimensions, inspectorOrNull);
+    }
+
+    private <T> void writeCompoundMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final T[] data, final long[] dimensions,
+            final long[] offset, final long[] dataSetDimensions,
+            final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(), data);
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    H5Dwrite(dataSetId, type.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, byteArray);
+                    baseWriter.h5.reclaimCompoundVL(type, byteArray);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final MDArray<T> data, final int[] blockDimensions,
+            final long[] offset, final int[] memoryOffset)
+    {
+        writeMDArrayBlockWithOffset(objectPath, type, data, blockDimensions, offset, memoryOffset,
+                null);
+    }
+
+    @Override
+    public <T> void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5CompoundType<T> type, final MDArray<T> data, final int[] blockDimensions,
+            final long[] offset, final int[] memoryOffset, final IByteArrayInspector inspectorOrNull)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    final long[] longBlockDimensions = MDAbstractArray.toLong(blockDimensions);
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId,
+                            MDAbstractArray.toLong(memoryOffset), longBlockDimensions);
+                    final byte[] byteArray =
+                            type.getObjectByteifyer().byteify(type.getStorageTypeId(),
+                                    data.getAsFlatArray());
+                    if (inspectorOrNull != null)
+                    {
+                        inspectorOrNull.inspect(byteArray);
+                    }
+                    H5Dwrite(dataSetId, type.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, byteArray);
+                    baseWriter.h5.reclaimCompoundVL(type, byteArray);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void createMDArray(String objectPath, HDF5CompoundType<T> type, int[] dimensions)
+    {
+        createMDArray(objectPath, type, dimensions,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void createMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final long[] dimensions, final int[] blockDimensions)
+    {
+        createMDArray(objectPath, type, dimensions, blockDimensions,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void createMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final long[] dimensions, final int[] blockDimensions,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, type.getStorageTypeId(), features,
+                            dimensions, MDAbstractArray.toLong(blockDimensions), type
+                                    .getObjectByteifyer().getRecordSizeOnDisk(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void createMDArray(final String objectPath, final HDF5CompoundType<T> type,
+            final int[] dimensions, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert type != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        type.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, type.getStorageTypeId(), features,
+                                nullDimensions, MDAbstractArray.toLong(dimensions), type
+                                        .getObjectByteifyer().getRecordSizeOnDisk(), registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, type.getStorageTypeId(), features,
+                                MDAbstractArray.toLong(dimensions), null, type.getObjectByteifyer()
+                                        .getRecordSizeOnDisk(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public <T> void writeMDArray(String objectPath, MDArray<T> data)
+    {
+        writeMDArray(objectPath, data, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public <T> void writeMDArray(String objectPath, MDArray<T> data,
+            HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null && data.size() > 0;
+
+        baseWriter.checkOpen();
+        final HDF5CompoundType<T> inferredCompoundType = getInferredType(data.getAsFlatArray());
+        inferredCompoundType.checkMappingComplete();
+        primWriteCompoundMDArray(objectPath, inferredCompoundType, data, features, null);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DataBlock.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DataBlock.java
new file mode 100644
index 0000000..2a2a456
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DataBlock.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * A class that is used for iterating over a data set block by block, using
+ * <em>natural data blocks</em>. The <em>natural block</em> of a chunked data set is a chunk; for a
+ * non-chunked data set it is the complete array.
+ * <p>
+ * The pattern for using this class is:
+ * 
+ * <pre>
+ * for (HDF5DataBlock<int[]> block : reader.getIntNaturalBlocks(dsName1D))
+ * {
+ *     int[] naturalBlock = block.getData();
+ *     ... work on naturalBlock, use block.getIndex() and block.getOffset() where needed ...
+ * }
+ * </pre>
+ * 
+ * <b>Note:</b> If the size of the data set is not an integer multiple of the block size, then the
+ * last block will be smaller than the natural block size.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5DataBlock<T>
+{
+    private final T data;
+
+    private final long offset;
+
+    private final long index;
+
+    HDF5DataBlock(T block, long index, long offset)
+    {
+        this.data = block;
+        this.index = index;
+        this.offset = offset;
+    }
+
+    /**
+     * Returns the data block itself.
+     */
+    public T getData()
+    {
+        return data;
+    }
+
+    /**
+     * Returns the offset of this block in the data set.
+     */
+    public long getOffset()
+    {
+        return offset;
+    }
+
+    /**
+     * Returns the iteration index of this block, starting with 0.
+     */
+    public long getIndex()
+    {
+        return index;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DataClass.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DataClass.java
new file mode 100644
index 0000000..cdb2518
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DataClass.java
@@ -0,0 +1,284 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_BITFIELD;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_COMPOUND;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ENUM;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_FLOAT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_INTEGER;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_OPAQUE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_REFERENCE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STRING;
+
+import java.util.BitSet;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5CompoundByteifyerFactory.IHDF5CompoundMemberBytifyerFactory;
+
+/**
+ * Identifies the class of a data type. Note that for array types the class of the elements is
+ * identified.
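+ * <p>
+ * For example, a data set of Java type <code>int[]</code> has data class {@link #INTEGER}, element
+ * size 4 and rank 1.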
+ * 
+ * @author Bernd Rinn
+ */
+public enum HDF5DataClass
+{
+    // Implementation note: The order matters! ENUM needs to be before INTEGER, as H5Tdetect_class
+    // will return TRUE for ENUM arrays when trying to detect an INTEGER class.
+    BITFIELD(H5T_BITFIELD, new BasicJavaTypeProvider(BitSet.class, null, null, null)), ENUM(
+            H5T_ENUM, new BasicJavaTypeProvider(HDF5EnumerationValue.class,
+                    HDF5EnumerationValueArray.class, null, null)), INTEGER(H5T_INTEGER,
+            new IntJavaTypeProvider()), FLOAT(H5T_FLOAT, new FloatJavaTypeProvider()), STRING(
+            H5T_STRING, new BasicJavaTypeProvider(String.class, String[].class, String[][].class,
+                    MDArray.class)), OPAQUE(H5T_OPAQUE, new BasicJavaTypeProvider(byte.class,
+            byte[].class, byte[][].class, MDByteArray.class)), BOOLEAN(-1,
+            new BasicJavaTypeProvider(boolean.class, BitSet.class, null, null)), COMPOUND(
+            H5T_COMPOUND, new BasicJavaTypeProvider(Map.class, Map[].class, Map[][].class,
+                    MDArray.class)), REFERENCE(H5T_REFERENCE, new BasicJavaTypeProvider(
+            String.class, String[].class, String[][].class, MDArray.class)), OTHER(-1,
+            new BasicJavaTypeProvider(null, null, null, null));
+
+    /**
+     * A role that can provide a Java type for a data class, rank and element size.
+     */
+    interface IHDF5JavaTypeProvider
+    {
+        Class<?> tryGetJavaType(int rank, int elementSize, HDF5DataTypeVariant typeVariantOrNull);
+    }
+
+    private final int id;
+
+    private final IHDF5JavaTypeProvider typeProvider;
+
+    HDF5DataClass(int id, IHDF5JavaTypeProvider typeProvider)
+    {
+        this.id = id;
+        this.typeProvider = typeProvider;
+    }
+
+    int getId()
+    {
+        return id;
+    }
+
+    /**
+     * Returns a {@link IHDF5JavaTypeProvider} that returns the default Java type for this data
+     * class.
+     * <p>
+     * Overriding the default for particular choices should be done by one of the
+     * {@link IHDF5CompoundMemberBytifyerFactory}s in
+     * {@link IHDF5CompoundMemberBytifyerFactory#tryGetOverrideJavaType(HDF5DataClass, int, int, HDF5DataTypeVariant)}.
+     */
+    IHDF5JavaTypeProvider getJavaTypeProvider()
+    {
+        return typeProvider;
+    }
+
+    /**
+     * Returns the {@link HDF5DataClass} for the given data <var>classId</var>.
+     * <p>
+     * <b>Note:</b> This method will never return {@link #BOOLEAN}; instead it will return
+     * {@link #ENUM} for a boolean value, as boolean values are stored as enums in the HDF5 file.
+     */
+    static HDF5DataClass classIdToDataClass(final int classId)
+    {
+        for (HDF5DataClass clazz : values())
+        {
+            if (clazz.id == classId)
+            {
+                return clazz;
+            }
+        }
+        return OTHER;
+    }
+
+    //
+    // Auxiliary classes
+    //
+
+    private static class BasicJavaTypeProvider implements IHDF5JavaTypeProvider
+    {
+        private final Class<?> javaTypeScalarOrNull;
+
+        private final Class<?> javaType1DArrayOrNull;
+
+        private final Class<?> javaType2DArrayOrNull;
+
+        private final Class<?> javaTypeMDArrayOrNull;
+
+        BasicJavaTypeProvider(Class<?> javaTypeScalarOrNull, Class<?> javaType1DArrayOrNull,
+                Class<?> javaType2DArrayOrNull, Class<?> javaTypeMDArrayOrNull)
+        {
+            this.javaTypeScalarOrNull = javaTypeScalarOrNull;
+            this.javaType1DArrayOrNull = javaType1DArrayOrNull;
+            this.javaType2DArrayOrNull = javaType2DArrayOrNull;
+            this.javaTypeMDArrayOrNull = javaTypeMDArrayOrNull;
+        }
+
+        @Override
+        public Class<?> tryGetJavaType(int rank, int elementSize,
+                HDF5DataTypeVariant typeVariantOrNull)
+        {
+            if (rank == 0)
+            {
+                return javaTypeScalarOrNull;
+            } else if (rank == 1)
+            {
+                return javaType1DArrayOrNull;
+            } else if (rank == 2)
+            {
+                return javaType2DArrayOrNull;
+            } else
+            {
+                return javaTypeMDArrayOrNull;
+            }
+        }
+    }
+
+    private static class IntJavaTypeProvider implements IHDF5JavaTypeProvider
+    {
+        @Override
+        public Class<?> tryGetJavaType(int rank, int elementSize,
+                HDF5DataTypeVariant typeVariantOrNull)
+        {
+            if (rank == 0)
+            {
+                switch (elementSize)
+                {
+                    case 1:
+                        return byte.class;
+                    case 2:
+                        return short.class;
+                    case 4:
+                        return int.class;
+                    case 8:
+                        return long.class;
+                    default:
+                        return null;
+                }
+            } else if (rank == 1)
+            {
+                switch (elementSize)
+                {
+                    case 1:
+                        return byte[].class;
+                    case 2:
+                        return short[].class;
+                    case 4:
+                        return int[].class;
+                    case 8:
+                        return long[].class;
+                    default:
+                        return null;
+                }
+            } else if (rank == 2)
+            {
+                switch (elementSize)
+                {
+                    case 1:
+                        return byte[][].class;
+                    case 2:
+                        return short[][].class;
+                    case 4:
+                        return int[][].class;
+                    case 8:
+                        return long[][].class;
+                    default:
+                        return null;
+                }
+            } else
+            {
+                switch (elementSize)
+                {
+                    case 1:
+                        return MDByteArray.class;
+                    case 2:
+                        return MDShortArray.class;
+                    case 4:
+                        return MDIntArray.class;
+                    case 8:
+                        return MDLongArray.class;
+                    default:
+                        return null;
+                }
+            }
+        }
+    }
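+
+    // Dispatch sketch (illustrative): rank selects the scalar/array flavour and
+    // elementSize selects the primitive width, e.g.:
+    //
+    //     new IntJavaTypeProvider().tryGetJavaType(1, 4, null) // -> int[].class
+    //     new IntJavaTypeProvider().tryGetJavaType(3, 8, null) // -> MDLongArray.class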
+
+    private static class FloatJavaTypeProvider implements IHDF5JavaTypeProvider
+    {
+        @Override
+        public Class<?> tryGetJavaType(int rank, int elementSize,
+                HDF5DataTypeVariant typeVariantOrNull)
+        {
+            if (rank == 0)
+            {
+                switch (elementSize)
+                {
+                    case 4:
+                        return float.class;
+                    case 8:
+                        return double.class;
+                    default:
+                        return null;
+                }
+            } else if (rank == 1)
+            {
+                switch (elementSize)
+                {
+                    case 4:
+                        return float[].class;
+                    case 8:
+                        return double[].class;
+                    default:
+                        return null;
+                }
+            } else if (rank == 2)
+            {
+                switch (elementSize)
+                {
+                    case 4:
+                        return float[][].class;
+                    case 8:
+                        return double[][].class;
+                    default:
+                        return null;
+                }
+            } else
+            {
+                switch (elementSize)
+                {
+                    case 4:
+                        return MDFloatArray.class;
+                    case 8:
+                        return MDDoubleArray.class;
+                    default:
+                        return null;
+                }
+            }
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DataSetInformation.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DataSetInformation.java
new file mode 100644
index 0000000..0a29dde
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DataSetInformation.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+
+/**
+ * A class that holds relevant information about a data set.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5DataSetInformation
+{
+    private final HDF5DataTypeInformation typeInformation;
+
+    private long[] dimensions;
+
+    private long[] maxDimensions;
+
+    private HDF5StorageLayout storageLayout = HDF5StorageLayout.NOT_APPLICABLE;
+
+    private int[] chunkSizesOrNull;
+
+    HDF5DataSetInformation(HDF5DataTypeInformation typeInformation,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this.typeInformation = typeInformation;
+        if (typeVariantOrNull != null)
+        {
+            typeInformation.setTypeVariant(typeVariantOrNull);
+        }
+    }
+
+    /**
+     * Returns the data type information for the data set.
+     */
+    public HDF5DataTypeInformation getTypeInformation()
+    {
+        return typeInformation;
+    }
+
+    /**
+     * Returns the data type variant of this data set, or <code>null</code>, if this data set is not
+     * tagged with a type variant.
+     */
+    public HDF5DataTypeVariant tryGetTypeVariant()
+    {
+        return typeInformation.tryGetTypeVariant();
+    }
+
+    /**
+     * Returns <code>true</code>, if the data set is a time stamp, or <code>false</code> otherwise.
+     */
+    public boolean isTimeStamp()
+    {
+        return typeInformation.isTimeStamp();
+    }
+
+    /**
+     * Returns <code>true</code>, if the data set is a time duration, or <code>false</code>
+     * otherwise.
+     */
+    public boolean isTimeDuration()
+    {
+        return typeInformation.isTimeDuration();
+    }
+
+    /**
+     * Returns the time unit of the data set, if the data set is a time duration, or
+     * <code>null</code> otherwise.
+     */
+    public HDF5TimeUnit tryGetTimeUnit()
+    {
+        return typeInformation.tryGetTimeUnit();
+    }
+
+    /**
+     * Returns the array dimensions of the data set.
+     */
+    public long[] getDimensions()
+    {
+        return dimensions;
+    }
+
+    void setDimensions(long[] dimensions)
+    {
+        this.dimensions = dimensions;
+    }
+
+    /**
+     * Returns the largest possible array dimensions of the data set.
+     */
+    public long[] getMaxDimensions()
+    {
+        return maxDimensions;
+    }
+
+    void setMaxDimensions(long[] maxDimensions)
+    {
+        this.maxDimensions = maxDimensions;
+    }
+
+    void setStorageLayout(HDF5StorageLayout storageLayout)
+    {
+        this.storageLayout = storageLayout;
+    }
+
+    /**
+     * Returns the storage layout of the data set in the HDF5 file.
+     */
+    public HDF5StorageLayout getStorageLayout()
+    {
+        return storageLayout;
+    }
+
+    /**
+     * Returns the chunk size in each array dimension of the data set, or <code>null</code>, if the
+     * data set does not have storage layout {@link HDF5StorageLayout#CHUNKED}.
+     */
+    public int[] tryGetChunkSizes()
+    {
+        return chunkSizesOrNull;
+    }
+
+    void setChunkSizes(int[] chunkSizes)
+    {
+        this.chunkSizesOrNull = chunkSizes;
+    }
+
+    /**
+     * Returns the rank (number of axes) of this data set.
+     */
+    public int getRank()
+    {
+        return dimensions.length;
+    }
+
+    /**
+     * Returns <code>true</code>, if the rank of this data set is 0.
+     */
+    public boolean isScalar()
+    {
+        return dimensions.length == 0;
+    }
+    
+    /**
+     * Returns <code>true</code>, if this data set type has a sign, and <code>false</code> otherwise.
+     */
+    public boolean isSigned()
+    {
+        return typeInformation.isSigned();
+    }
+
+    /**
+     * Returns the one-dimensional length of the multi-dimensional array defined by
+     * <var>dimensions</var>.
+     */
+    private static long getLength(final long[] dimensions)
+    {
+        assert dimensions != null;
+
+        if (dimensions.length == 0) // NULL data space needs to be treated differently
+        {
+            return 0;
+        }
+        long length = dimensions[0];
+        for (int i = 1; i < dimensions.length; ++i)
+        {
+            length *= dimensions[i];
+        }
+        return length;
+    }
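+
+    // Worked example (illustrative): the length is the product of all
+    // dimensions, and the empty dimensions array of a NULL data space maps to 0:
+    //
+    //     getLength(new long[] { 2, 3, 4 }) // -> 24
+    //     getLength(new long[0])            // -> 0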
+
+    /**
+     * Returns the total number of elements of this data set.
+     */
+    public long getNumberOfElements()
+    {
+        return getLength(dimensions);
+    }
+
+    /**
+     * Returns the total size (in bytes) of this data set.
+     */
+    public long getSize()
+    {
+        return getLength(dimensions) * typeInformation.getElementSize();
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (obj == null || obj instanceof HDF5DataSetInformation == false)
+        {
+            return false;
+        }
+        final HDF5DataSetInformation that = (HDF5DataSetInformation) obj;
+        final EqualsBuilder builder = new EqualsBuilder();
+        builder.append(typeInformation, that.typeInformation);
+        builder.append(dimensions, that.dimensions);
+        builder.append(maxDimensions, that.maxDimensions);
+        return builder.isEquals();
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final HashCodeBuilder builder = new HashCodeBuilder();
+        builder.append(typeInformation);
+        builder.append(dimensions);
+        builder.append(maxDimensions);
+        return builder.toHashCode();
+    }
+
+    @Override
+    public String toString()
+    {
+        return typeInformation.toString() + ":" + ArrayUtils.toString(dimensions);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DataType.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DataType.java
new file mode 100644
index 0000000..6a4eb56
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DataType.java
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+
+/**
+ * The abstract base class of Java wrappers for HDF data types.
+ * 
+ * @author Bernd Rinn
+ */
+public abstract class HDF5DataType
+{
+
+    private int fileId;
+
+    private int storageTypeId;
+
+    private int nativeTypeId;
+
+    private final HDF5BaseReader baseReader;
+
+    HDF5DataType(int fileId, int storageTypeId, int nativeTypeId, HDF5BaseReader baseReader)
+    {
+        assert fileId >= 0;
+
+        this.fileId = fileId;
+        this.storageTypeId = storageTypeId;
+        this.nativeTypeId = nativeTypeId;
+        this.baseReader = baseReader;
+        baseReader.fileRegistry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    HDF5DataType.this.fileId = -1;
+                    HDF5DataType.this.storageTypeId = -1;
+                    HDF5DataType.this.nativeTypeId = -1;
+                }
+            });
+    }
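+
+    // Lifecycle sketch (illustrative): the clean-up hook registered above sets
+    // all ids to -1 when the owning file is closed, so a subsequent check(...)
+    // or checkOpen() fails fast with "Type ... is closed." instead of passing
+    // stale ids to the HDF5 library.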
+
+    /**
+     * Returns the storage data type id of this type.
+     */
+    int getStorageTypeId()
+    {
+        return storageTypeId;
+    }
+
+    /**
+     * Returns the native data type id of this type.
+     */
+    int getNativeTypeId()
+    {
+        return nativeTypeId;
+    }
+
+    /**
+     * Checks whether this type is for file <var>expectedFileId</var>.
+     * 
+     * @throws HDF5JavaException If this type is not for file <var>expectedFileId</var>.
+     */
+    void check(final int expectedFileId) throws HDF5JavaException
+    {
+        if (fileId < 0)
+        {
+            throw new HDF5JavaException("Type " + getName() + " is closed.");
+        }
+        if (fileId != expectedFileId)
+        {
+            throw new HDF5JavaException("Type " + getName() + " is not from this file.");
+        }
+    }
+
+    /**
+     * Checks whether this type is open.
+     * 
+     * @throws HDF5JavaException If this type is not open.
+     */
+    void checkOpen() throws HDF5JavaException
+    {
+        if (fileId < 0)
+        {
+            throw new HDF5JavaException("Type " + getName() + " is closed.");
+        }
+    }
+
+    /**
+     * Returns a name for this type, or <code>null</code> if this type has no name.
+     */
+    public abstract String tryGetName();
+
+    /**
+     * Returns a name for this type, or <code>UNKNOWN</code> if this type has no name.
+     */
+    public String getName()
+    {
+        final String nameOrNull = tryGetName();
+        return (nameOrNull == null) ? "UNKNOWN" : nameOrNull;
+    }
+
+    /**
+     * Returns the data type path of this type, or <code>null</code>, if this type is not a committed
+     * data type.
+     */
+    public String tryGetDataTypePath()
+    {
+        return getDataTypeInformation(DataTypeInfoOptions.PATH).tryGetDataTypePath();
+    }
+
+    /**
+     * Returns the data type information for this data type.
+     * 
+     * @param dataTypeInfoOptions The options that decide how much information to fetch.
+     */
+    public HDF5DataTypeInformation getDataTypeInformation(
+            final DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return baseReader.getDataTypeInformation(storageTypeId, dataTypeInfoOptions);
+    }
+
+    /**
+     * Returns the data type information (with {@link DataTypeInfoOptions#DEFAULT}) for this data
+     * type.
+     */
+    public HDF5DataTypeInformation getDataTypeInformation()
+    {
+        return baseReader.getDataTypeInformation(storageTypeId, DataTypeInfoOptions.DEFAULT);
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + fileId;
+        result = prime * result + storageTypeId;
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        HDF5DataType other = (HDF5DataType) obj;
+        if (fileId != other.fileId)
+        {
+            return false;
+        }
+        if (storageTypeId != other.storageTypeId)
+        {
+            return false;
+        }
+        return true;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DataTypeInformation.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DataTypeInformation.java
new file mode 100644
index 0000000..d343f82
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DataTypeInformation.java
@@ -0,0 +1,595 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import org.apache.commons.lang.ObjectUtils;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A class that holds relevant information about a data type.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5DataTypeInformation
+{
+    /**
+     * An object that represents the options for a data type information object.
+     * 
+     * @author Bernd Rinn
+     */
+    public static final class DataTypeInfoOptions
+    {
+        public static final DataTypeInfoOptions MINIMAL = new DataTypeInfoOptions(false, false);
+
+        public static final DataTypeInfoOptions ALL = new DataTypeInfoOptions(true, true);
+
+        public static final DataTypeInfoOptions DEFAULT = new DataTypeInfoOptions(false, true);
+
+        public static final DataTypeInfoOptions PATH = new DataTypeInfoOptions(true, false);
+
+        private boolean knowsDataTypePath;
+
+        private boolean knowsDataTypeVariant;
+
+        DataTypeInfoOptions(boolean knowsDataTypePath, boolean knowsDataTypeVariant)
+        {
+            this.knowsDataTypePath = knowsDataTypePath;
+            this.knowsDataTypeVariant = knowsDataTypeVariant;
+        }
+
+        DataTypeInfoOptions()
+        {
+            knowsDataTypePath = false;
+            knowsDataTypeVariant = true;
+        }
+
+        public DataTypeInfoOptions path(boolean readDataTypePath)
+        {
+            this.knowsDataTypePath = readDataTypePath;
+            return this;
+        }
+
+        public DataTypeInfoOptions path()
+        {
+            this.knowsDataTypePath = true;
+            return this;
+        }
+
+        public DataTypeInfoOptions variant(boolean readDataTypeVariant)
+        {
+            this.knowsDataTypeVariant = readDataTypeVariant;
+            return this;
+        }
+
+        public DataTypeInfoOptions noVariant()
+        {
+            this.knowsDataTypeVariant = false;
+            return this;
+        }
+
+        public DataTypeInfoOptions all()
+        {
+            this.knowsDataTypePath = true;
+            this.knowsDataTypeVariant = true;
+            return this;
+        }
+
+        public DataTypeInfoOptions nothing()
+        {
+            this.knowsDataTypePath = false;
+            this.knowsDataTypeVariant = false;
+            return this;
+        }
+
+        public boolean knowsDataTypePath()
+        {
+            return knowsDataTypePath;
+        }
+
+        public boolean knowsDataTypeVariant()
+        {
+            return knowsDataTypeVariant;
+        }
+
+    }
+
+    /**
+     * Returns a new {@link DataTypeInfoOptions} object.
+     */
+    public static DataTypeInfoOptions options()
+    {
+        return new DataTypeInfoOptions();
+    }
+
+    private final boolean arrayType;
+
+    private final boolean signed;
+
+    private final boolean variableLengthString;
+
+    private final String dataTypePathOrNull;
+
+    private final String nameOrNull;
+
+    private final HDF5DataClass dataClass;
+
+    private int elementSize;
+
+    private int numberOfElements;
+
+    private CharacterEncoding encoding;
+
+    private int[] dimensions;
+
+    private String opaqueTagOrNull;
+
+    private final DataTypeInfoOptions options;
+
+    private HDF5DataTypeVariant typeVariantOrNull;
+
+    HDF5DataTypeInformation(String dataTypePathOrNull, DataTypeInfoOptions options,
+            HDF5DataClass dataClass, String houseKeepingNameSuffix, int elementSize, boolean signed)
+    {
+        this(dataTypePathOrNull, options, dataClass, CharacterEncoding.ASCII,
+                houseKeepingNameSuffix, elementSize, 1, new int[0], false, signed, false, null);
+    }
+
+    HDF5DataTypeInformation(HDF5DataClass dataClass, String houseKeepingNameSuffix,
+            int elementSize, boolean signed)
+    {
+        this(null, DataTypeInfoOptions.ALL, dataClass, CharacterEncoding.ASCII,
+                houseKeepingNameSuffix, elementSize, 1, new int[0], false, signed, false, null);
+    }
+
+    HDF5DataTypeInformation(HDF5DataClass dataClass, String houseKeepingNameSuffix,
+            int elementSize, int numberOfElements, boolean signed)
+    {
+        this(null, DataTypeInfoOptions.ALL, dataClass, CharacterEncoding.ASCII,
+                houseKeepingNameSuffix, elementSize, numberOfElements, new int[]
+                    { numberOfElements }, false, signed, false, null);
+
+    }
+
+    HDF5DataTypeInformation(String dataTypePathOrNull, DataTypeInfoOptions options,
+            HDF5DataClass dataClass, CharacterEncoding encoding, String houseKeepingNameSuffix,
+            int elementSize, boolean signed, boolean variableLengthString,
+            String opaqueTagOrNull)
+    {
+        this(dataTypePathOrNull, options, dataClass, encoding, houseKeepingNameSuffix, elementSize,
+                1, new int[0], false, signed, variableLengthString, opaqueTagOrNull);
+    }
+
+    HDF5DataTypeInformation(String dataTypePathOrNull, DataTypeInfoOptions options,
+            HDF5DataClass dataClass, CharacterEncoding encoding, String houseKeepingNameSuffix,
+            int elementSize, int[] dimensions, boolean arrayType, boolean signed,
+            boolean variableLengthString)
+    {
+        this(dataTypePathOrNull, options, dataClass, encoding, houseKeepingNameSuffix, elementSize,
+                MDAbstractArray.getLength(dimensions), dimensions, arrayType, signed,
+                variableLengthString, null);
+
+    }
+
+    private HDF5DataTypeInformation(String dataTypePathOrNull, DataTypeInfoOptions options,
+            HDF5DataClass dataClass, CharacterEncoding encoding, String houseKeepingNameSuffix,
+            int elementSize, int numberOfElements, int[] dimensions, boolean arrayType,
+            boolean signed, boolean variableLengthString, String opaqueTagOrNull)
+    {
+        if (dataClass == HDF5DataClass.BOOLEAN || dataClass == HDF5DataClass.STRING)
+        {
+            this.dataTypePathOrNull = null;
+            this.nameOrNull = null;
+        } else
+        {
+            this.dataTypePathOrNull = dataTypePathOrNull;
+            this.nameOrNull =
+                    HDF5Utils.tryGetDataTypeNameFromPath(dataTypePathOrNull,
+                            houseKeepingNameSuffix, dataClass);
+        }
+        this.arrayType = arrayType;
+        this.signed = signed;
+        this.variableLengthString = variableLengthString;
+        this.dataClass = dataClass;
+        this.elementSize = elementSize;
+        this.numberOfElements = numberOfElements;
+        this.dimensions = dimensions;
+        this.encoding = encoding;
+        this.opaqueTagOrNull = opaqueTagOrNull;
+        this.options = options;
+    }
+
+    /**
+     * Returns the raw data class (<code>INTEGER</code>, <code>FLOAT</code>, ...) of this type.
+     * <p>
+     * May differ from {@link #getDataClass()} if it is the type of a scaled enum
+     * ({@link HDF5DataTypeVariant#ENUM}) or a scaled bitfield
+     * ({@link HDF5DataTypeVariant#BITFIELD}).
+     */
+    public HDF5DataClass getRawDataClass()
+    {
+        return dataClass;
+    }
+
+    /**
+     * Returns the data class (<code>INTEGER</code>, <code>FLOAT</code>, ...) of this type.
+     */
+    public HDF5DataClass getDataClass()
+    {
+        if (typeVariantOrNull == HDF5DataTypeVariant.ENUM)
+        {
+            return HDF5DataClass.ENUM;
+        } else if (typeVariantOrNull == HDF5DataTypeVariant.BITFIELD)
+        {
+            return HDF5DataClass.BITFIELD;
+        } else
+        {
+            return dataClass;
+        }
+    }
+
+    /**
+     * Returns the size of one element (in bytes) of this type. For strings, this is the total length.
+     */
+    public int getElementSize()
+    {
+        return elementSize;
+    }
+
+    /**
+     * The usable length. Usually equal to {@link #getElementSize()}, except for strings,
+     * where the character encoding is taken into account.
+     */
+    public int getUsableLength()
+    {
+        if (dataClass == HDF5DataClass.STRING && elementSize > 0)
+        {
+            return variableLengthString ? -1 : elementSize / encoding.getMaxBytesPerChar();
+        } else
+        {
+            return elementSize;
+        }
+    }
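+
+    // Example (assuming, hypothetically, that CharacterEncoding.UTF8 reports a
+    // maximum of 4 bytes per character): a fixed-length UTF-8 string type with
+    // elementSize == 40 has a usable length of 10 characters; a variable-length
+    // string reports -1; non-string types report elementSize unchanged.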
+
+    /**
+     * The element size as is relevant for padding to ensure memory alignment.
+     */
+    public int getElementSizeForPadding()
+    {
+        // Variable-length strings store a pointer.
+        if (variableLengthString)
+        {
+            return HDFNativeData.getMachineWordSize();
+        }
+        // Fixed-length strings are accessing single bytes.
+        if (dataClass == HDF5DataClass.STRING)
+        {
+            return 1;
+        }
+        // Otherwise: use elementSize.
+        return elementSize;
+    }
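+
+    // Padding sketch (illustrative): for a compound member layout this yields
+    //
+    //     variable-length string -> HDFNativeData.getMachineWordSize(), e.g. 8
+    //     fixed-length string    -> 1 (byte-aligned)
+    //     4-byte INTEGER         -> 4 (its element size)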
+
+    void setElementSize(int elementSize)
+    {
+        this.elementSize = elementSize;
+    }
+
+    /**
+     * Returns the number of elements of this type.
+     * <p>
+     * This will be 1 except for array data types.
+     */
+    public int getNumberOfElements()
+    {
+        return numberOfElements;
+    }
+
+    /**
+     * Returns the total size (in bytes) of this data set.
+     */
+    public int getSize()
+    {
+        return elementSize * numberOfElements;
+    }
+
+    /**
+     * Returns the rank (number of dimensions) of this type (0 for a scalar type).
+     */
+    public int getRank()
+    {
+        return dimensions.length;
+    }
+
+    /**
+     * Returns the dimensions along each axis of this type (an empty array for a scalar type).
+     */
+    public int[] getDimensions()
+    {
+        return dimensions;
+    }
+
+    void setDimensions(int[] dimensions)
+    {
+        this.dimensions = dimensions;
+        this.numberOfElements = MDAbstractArray.getLength(dimensions);
+    }
+
+    /**
+     * Returns <code>true</code> if this type is an HDF5 array type.
+     */
+    public boolean isArrayType()
+    {
+        return arrayType;
+    }
+
+    /**
+     * Returns <code>true</code> if this type is an HDF5 VL (variable-length) type.
+     * 
+     * @deprecated Use {@link #isVariableLengthString()} instead.
+     */
+    @Deprecated
+    public boolean isVariableLengthType()
+    {
+        return variableLengthString;
+    }
+
+    /**
+     * Returns <code>true</code>, if this data set type has a sign, and <code>false</code> otherwise.
+     */
+    public boolean isSigned()
+    {
+        return signed;
+    }
+
+    /**
+     * Returns <code>true</code>, if this data set type is a variable-length string, or
+     * <code>false</code> otherwise.
+     */
+    public boolean isVariableLengthString()
+    {
+        return variableLengthString;
+    }
+
+    /**
+     * Returns the tag of an opaque data type, or <code>null</code>, if this data type is not
+     * opaque.
+     */
+    public String tryGetOpaqueTag()
+    {
+        return opaqueTagOrNull;
+    }
+
+    /**
+     * Returns whether the data type path has been determined.
+     * <p>
+     * A return value of <code>true</code> does <i>not necessarily</i> mean that
+     * {@link #tryGetDataTypePath()} will return a value other than <code>null</code>, but a return
+     * value of <code>false</code> means that this method will always return <code>null</code>.
+     */
+    public boolean knowsDataTypePath()
+    {
+        return options.knowsDataTypePath();
+    }
+
+    /**
+     * If this is a committed (named) data type and {@link #knowsDataTypePath()} ==
+     * <code>true</code>, return the path of the data type. Otherwise <code>null</code> is returned.
+     */
+    public String tryGetDataTypePath()
+    {
+        return dataTypePathOrNull;
+    }
+
+    /**
+     * Returns the name of this data type, if it is a committed data type, or <code>null</code>
+     * otherwise.
+     */
+    public String tryGetName()
+    {
+        return nameOrNull;
+    }
+
+    /**
+     * Returns whether the data type variant has been determined.
+     * <p>
+     * A return value of <code>true</code> does <i>not necessarily</i> mean that
+     * {@link #tryGetTypeVariant()} will return a value other than <code>null</code>, but a return
+     * value of <code>false</code> means that this method will always return <code>null</code>.
+     */
+    public boolean knowsDataTypeVariant()
+    {
+        return options.knowsDataTypeVariant;
+    }
+
+    /**
+     * Returns the {@link HDF5DataTypeVariant}, or <code>null</code>, if this type has no variant or
+     * {@link #knowsDataTypeVariant()} == <code>false</code>.
+     */
+    public HDF5DataTypeVariant tryGetTypeVariant()
+    {
+        if (typeVariantOrNull == null && options.knowsDataTypeVariant())
+        {
+            return HDF5DataTypeVariant.NONE;
+        } else
+        {
+            return typeVariantOrNull;
+        }
+    }
+
+    private HDF5DataTypeVariant tryGetTypeVariantReplaceNoneWithNull()
+    {
+        return (typeVariantOrNull == HDF5DataTypeVariant.NONE) ? null : typeVariantOrNull;
+    }
+
+    void setTypeVariant(HDF5DataTypeVariant typeVariant)
+    {
+        this.typeVariantOrNull = typeVariant;
+    }
+
+    /**
+     * Returns <code>true</code>, if the data set is a time stamp, or <code>false</code> otherwise.
+     */
+    public boolean isTimeStamp()
+    {
+        return (typeVariantOrNull != null) ? typeVariantOrNull.isTimeStamp() : false;
+    }
+
+    /**
+     * Returns <code>true</code>, if the data set is a time duration, or <code>false</code>
+     * otherwise.
+     */
+    public boolean isTimeDuration()
+    {
+        return (typeVariantOrNull != null) ? typeVariantOrNull.isTimeDuration() : false;
+    }
+
+    /**
+     * Returns the time unit of the data set, if the data set is a time duration, or
+     * <code>null</code> otherwise.
+     */
+    public HDF5TimeUnit tryGetTimeUnit()
+    {
+        return (typeVariantOrNull != null) ? typeVariantOrNull.tryGetTimeUnit() : null;
+    }
+
+    /**
+     * Returns <code>true</code>, if the data set is an enumeration type.
+     */
+    public boolean isEnum()
+    {
+        return getDataClass() == HDF5DataClass.ENUM;
+    }
+
+    /**
+     * Returns <code>true</code>, if the data set is a bitfield type.
+     */
+    public boolean isBitField()
+    {
+        return getDataClass() == HDF5DataClass.BITFIELD;
+    }
+
+    /**
+     * Returns an appropriate Java type, or <code>null</code>, if this HDF5 type has no appropriate
+     * Java type.
+     */
+    public Class<?> tryGetJavaType()
+    {
+        final int rank = (dimensions.length == 1 && dimensions[0] == 1) ? 0 : dimensions.length;
+        final Class<?> overrideDataTypeOrNull =
+                HDF5CompoundByteifyerFactory.tryGetOverrideJavaType(dataClass, rank, elementSize,
+                        typeVariantOrNull);
+        if (overrideDataTypeOrNull != null)
+        {
+            return overrideDataTypeOrNull;
+        } else
+        {
+            return dataClass.getJavaTypeProvider().tryGetJavaType(rank, elementSize,
+                    typeVariantOrNull);
+        }
+    }
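+
+    // Rank sketch (illustrative): dimensions { 1 } count as a scalar, so a
+    // 4-byte INTEGER type yields int.class, while dimensions { 5 } give rank 1
+    // and int[].class. Override factories are consulted first, e.g. so that a
+    // time stamp variant can map to java.util.Date instead.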
+
+    //
+    // Object
+    //
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (obj == null || obj instanceof HDF5DataTypeInformation == false)
+        {
+            return false;
+        }
+        final HDF5DataTypeInformation that = (HDF5DataTypeInformation) obj;
+        final HDF5DataTypeVariant thisTypeVariant = tryGetTypeVariant();
+        final HDF5DataTypeVariant thatTypeVariant = that.tryGetTypeVariant();
+        return dataClass.equals(that.dataClass) && elementSize == that.elementSize
+                && encoding == that.encoding && numberOfElements == that.numberOfElements
+                && ObjectUtils.equals(nameOrNull, that.nameOrNull)
+                && ObjectUtils.equals(dataTypePathOrNull, that.dataTypePathOrNull)
+                && ObjectUtils.equals(thisTypeVariant, thatTypeVariant);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final HDF5DataTypeVariant typeVariant = tryGetTypeVariant();
+        return ((((((17 * 59 + dataClass.hashCode()) * 59 + elementSize) * 59 + ObjectUtils
+                .hashCode(encoding)) * 59 + numberOfElements) * 59 + ObjectUtils
+                .hashCode(nameOrNull)) * 59 + ObjectUtils.hashCode(dataTypePathOrNull) * 59)
+                + ObjectUtils.hashCode(typeVariant);
+    }
+
+    @Override
+    public String toString()
+    {
+        final String name;
+        if (nameOrNull != null)
+        {
+            name = "<" + nameOrNull + ">";
+        } else
+        {
+            name = "";
+        }
+        final HDF5DataTypeVariant variantOrNull = tryGetTypeVariantReplaceNoneWithNull();
+        if (numberOfElements == 1)
+        {
+            if (variantOrNull != null)
+            {
+                return name + dataClass + "(" + getUsableLength() + ")/" + variantOrNull.toString();
+            } else
+            {
+                return name + dataClass + "(" + getUsableLength() + ")";
+            }
+        } else if (dimensions.length == 1)
+        {
+            if (variantOrNull != null)
+            {
+                return name + dataClass + "(" + getUsableLength() + ", #" + numberOfElements + ")/"
+                        + variantOrNull.toString();
+            } else
+            {
+                return name + dataClass + "(" + getUsableLength() + ", #" + numberOfElements + ")";
+            }
+        } else
+        {
+            final StringBuilder builder = new StringBuilder();
+            builder.append(name);
+            builder.append(dataClass.toString());
+            builder.append('(');
+            builder.append(getUsableLength());
+            builder.append(", [");
+            for (int d : dimensions)
+            {
+                builder.append(d);
+                builder.append(',');
+            }
+            if (dimensions.length > 0)
+            {
+                builder.setLength(builder.length() - 1);
+            }
+            builder.append(']');
+            builder.append(')');
+            if (typeVariantOrNull != null)
+            {
+                builder.append('/');
+                builder.append(typeVariantOrNull.toString());
+            }
+            return builder.toString();
+        }
+    }
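+
+    // Illustrative toString() renderings (names and sizes hypothetical):
+    //
+    //     INTEGER(4)                          scalar, no name, no variant
+    //     <myType>FLOAT(4, #5)                named 1D array type of 5 elements
+    //     STRING(10, [2,3])                   multi-dimensional type
+    //     INTEGER(8)/TIME_DURATION_SECONDS    scalar with a type variant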
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DataTypeVariant.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DataTypeVariant.java
new file mode 100644
index 0000000..25a4c05
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DataTypeVariant.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * An enum of all type variants. Type variants contain additional information on how to interpret a
+ * data set, similar to the tag for the opaque type.
+ * 
+ * @author Bernd Rinn
+ */
+public enum HDF5DataTypeVariant
+{
+    //
+    // Implementation note: Never change the order or the names of the values or else old files will
+    // be interpreted wrongly!
+    //
+    // Appending of new type variants at the end of the list is fine.
+    //
+
+    /**
+     * Used for data sets that encode time stamps as number of milli-seconds since midnight, January
+     * 1, 1970 UTC (aka "start of the epoch").
+     */
+    TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH(long.class, Long.class, Date.class),
+
+    /**
+     * A time duration in micro-seconds.
+     */
+    TIME_DURATION_MICROSECONDS(HDF5Utils.allTimeDurationTypes),
+
+    /**
+     * A time duration in milli-seconds.
+     */
+    TIME_DURATION_MILLISECONDS(HDF5Utils.allTimeDurationTypes),
+
+    /**
+     * A time duration in seconds.
+     */
+    TIME_DURATION_SECONDS(HDF5Utils.allTimeDurationTypes),
+
+    /**
+     * A time duration in minutes.
+     */
+    TIME_DURATION_MINUTES(HDF5Utils.allTimeDurationTypes),
+
+    /**
+     * A time duration in hours.
+     */
+    TIME_DURATION_HOURS(HDF5Utils.allTimeDurationTypes),
+
+    /**
+     * A time duration in days.
+     */
+    TIME_DURATION_DAYS(HDF5Utils.allTimeDurationTypes),
+
+    /**
+     * An enumeration.
+     */
+    ENUM(HDF5EnumerationValue.class, HDF5EnumerationValueArray.class),
+
+    /**
+     * No type variant.
+     */
+    NONE,
+
+    /**
+     * A bitfield.
+     */
+    BITFIELD(BitSet.class);
+
+    private Set<Class<?>> compatibleTypes;
+
+    private HDF5DataTypeVariant(Class<?>... compatibleTypes)
+    {
+        this.compatibleTypes = new HashSet<Class<?>>(Arrays.asList(compatibleTypes));
+    }
+
+    /**
+     * Returns <code>true</code>, if <var>typeVariantOrNull</var> is not
+     * <code>null</code> and not <code>NONE</code>.
+     */
+    public static boolean isTypeVariant(HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return (typeVariantOrNull != null) && typeVariantOrNull.isTypeVariant();
+    }
+
+    /**
+     * Returns <code>true</code>, if <var>typeVariantOrdinal</var> does not
+     * represent <code>NONE</code>.
+     */
+    public static boolean isTypeVariant(int typeVariantOrdinal)
+    {
+        return typeVariantOrdinal != NONE.ordinal();
+    }
+
+    /**
+     * Returns <var>typeVariantOrNull</var>, if it is not <code>null</code>, and <code>NONE</code>
+     * otherwise.
+     */
+    public static HDF5DataTypeVariant maskNull(HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return (typeVariantOrNull != null) ? typeVariantOrNull : NONE;
+    }
+
+    /**
+     * Returns <var>typeVariantOrNull</var>, if it is not <code>NONE</code>, and <code>null</code>
+     * otherwise.
+     */
+    public static HDF5DataTypeVariant unmaskNone(HDF5DataTypeVariant typeVariantOrNull)
+    {
+        return (typeVariantOrNull != NONE) ? typeVariantOrNull : null;
+    }
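+
+    // Round-trip sketch (illustrative): maskNull and unmaskNone are inverses
+    // over the "no variant" state:
+    //
+    //     maskNull(null)   == NONE
+    //     unmaskNone(NONE) == null
+    //     unmaskNone(maskNull(v)) == v    for any v other than NONE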
+
+    /**
+     * Returns <code>true</code>, if this type variant is not <code>NONE</code>.
+     */
+    public boolean isTypeVariant()
+    {
+        return this != NONE;
+    }
+
+    /**
+     * Returns <code>true</code>, if the type variant denoted by <var>typeVariantOrdinal</var>
+     * corresponds to a time duration.
+     */
+    public static boolean isTimeDuration(final int typeVariantOrdinal)
+    {
+        return typeVariantOrdinal >= TIME_DURATION_MICROSECONDS.ordinal()
+                && typeVariantOrdinal <= TIME_DURATION_DAYS.ordinal();
+    }
+
+    /**
+     * Returns <code>true</code> if <var>type</var> is compatible with this type variant.
+     */
+    public boolean isCompatible(Class<?> type)
+    {
+        return compatibleTypes.contains(type);
+    }
+    
+    /**
+     * Returns the time unit for the given <var>typeVariant</var>. Note that it is an error
+     * if <var>typeVariant</var> does not correspond to a time unit.
+     */
+    public static HDF5TimeUnit getTimeUnit(final HDF5DataTypeVariant typeVariant)
+    {
+        return HDF5TimeUnit.values()[typeVariant.ordinal()
+                - HDF5DataTypeVariant.TIME_DURATION_MICROSECONDS.ordinal()];
+    }
+
+    /**
+     * Returns the time unit for the given <var>typeVariantOrdinal</var>. Note that it is an error
+     * if <var>typeVariantOrdinal</var> does not correspond to a time unit.
+     */
+    public static HDF5TimeUnit getTimeUnit(final int typeVariantOrdinal)
+    {
+        return HDF5TimeUnit.values()[typeVariantOrdinal
+                - HDF5DataTypeVariant.TIME_DURATION_MICROSECONDS.ordinal()];
+    }
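+
+    // Ordinal arithmetic sketch (illustrative): the TIME_DURATION_* constants
+    // are declared in the same order as the values of HDF5TimeUnit, so the
+    // offset from TIME_DURATION_MICROSECONDS indexes the unit directly:
+    //
+    //     getTimeUnit(TIME_DURATION_SECONDS.ordinal()) // -> the seconds unit
+    //
+    // This is another reason why new variants may only be appended (see the
+    // implementation note at the top of this enum).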
+
+    /**
+     * Returns <code>true</code>, if this type variant corresponds to a time stamp.
+     */
+    public boolean isTimeStamp()
+    {
+        return this == TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH;
+    }
+
+    /**
+     * Returns <code>true</code>, if this type variant corresponds to a time duration.
+     */
+    public boolean isTimeDuration()
+    {
+        return isTimeDuration(ordinal());
+    }
+
+    /**
+     * Returns the time unit for this type variant, or <code>null</code>, if this type variant does
+     * not represent a time duration.
+     */
+    public HDF5TimeUnit tryGetTimeUnit()
+    {
+        final int ordinal = ordinal();
+        return isTimeDuration(ordinal) ? getTimeUnit(ordinal) : null;
+    }
+    
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DateTimeReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DateTimeReader.java
new file mode 100644
index 0000000..75afcec
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DateTimeReader.java
@@ -0,0 +1,638 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT64;
+
+import java.util.Date;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5DateTimeReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5DateTimeReader implements IHDF5DateTimeReader
+{
+
+    private final HDF5BaseReader baseReader;
+
+    private final HDF5LongReader longReader;
+
+    HDF5DateTimeReader(HDF5BaseReader baseReader, HDF5LongReader longReader)
+    {
+        assert baseReader != null;
+        assert longReader != null;
+
+        this.baseReader = baseReader;
+        this.longReader = longReader;
+    }
+
+    @Override
+    public long getAttrAsLong(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Long> getAttributeRunnable = new ICallableWithCleanUp<Long>()
+            {
+                @Override
+                public Long call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    baseReader.checkIsTimeStamp(objectPath, attributeName, objectId, registry);
+                    final long[] data =
+                            baseReader.h5
+                                    .readAttributeAsLongArray(attributeId, H5T_NATIVE_INT64, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public Date getAttr(String objectPath, String attributeName)
+    {
+        return new Date(getAttrAsLong(objectPath, attributeName));
+    }
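+
+    // Typical call path (a sketch; assumes the attribute was written with a
+    // time stamp type variant, and that this reader is reached through the
+    // IHDF5Reader facade):
+    //
+    //     Date created = reader.time().getAttr("/myDataSet", "creationTime");
+    //
+    // An attribute without the time stamp tag fails the checkIsTimeStamp()
+    // validation performed above.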
+
+    @Override
+    public long[] getArrayAttrAsLong(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> getAttributeRunnable =
+                new ICallableWithCleanUp<long[]>()
+                    {
+                        @Override
+                        public long[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            baseReader.checkIsTimeStamp(objectPath, attributeName, objectId,
+                                    registry);
+                            return longReader.getLongArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDLongArray getMDArrayAttrAsLong(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDLongArray>()
+                    {
+                        @Override
+                        public MDLongArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            baseReader.checkIsTimeStamp(objectPath, attributeName, objectId,
+                                    registry);
+                            return longReader.getLongMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public Date[] getArrayAttr(String objectPath, String attributeName)
+    {
+        final long[] timeStampArray = getArrayAttrAsLong(objectPath, attributeName);
+        return timeStampsToDates(timeStampArray);
+    }
+
+    @Override
+    public MDArray<Date> getMDArrayAttr(String objectPath, String attributeName)
+    {
+        final MDLongArray timeStampArray = getMDArrayAttrAsLong(objectPath, attributeName);
+        return timeStampsToDates(timeStampArray);
+    }
+
+    @Override
+    public boolean isTimeStamp(String objectPath, String attributeName) throws HDF5JavaException
+    {
+        final HDF5DataTypeVariant typeVariantOrNull =
+                baseReader.tryGetTypeVariant(objectPath, attributeName);
+        return typeVariantOrNull != null && typeVariantOrNull.isTimeStamp();
+    }
+
+    @Override
+    public boolean isTimeStamp(final String objectPath) throws HDF5JavaException
+    {
+        final HDF5DataTypeVariant typeVariantOrNull = baseReader.tryGetTypeVariant(objectPath);
+        return typeVariantOrNull != null && typeVariantOrNull.isTimeStamp();
+    }
+
+    @Override
+    public long readTimeStamp(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Long> readCallable = new ICallableWithCleanUp<Long>()
+            {
+                @Override
+                public Long call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final long[] data = new long[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readTimeStampArray(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, registry);
+                    final long[] data = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readTimeStampArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, blockNumber * blockSize,
+                                    blockSize, registry);
+                    final long[] data = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readTimeStampArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final long[] data = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public Date readDate(final String objectPath) throws HDF5JavaException
+    {
+        return new Date(readTimeStamp(objectPath));
+    }
+
+    @Override
+    public Date[] readDateArray(final String objectPath) throws HDF5JavaException
+    {
+        final long[] timeStampArray = readTimeStampArray(objectPath);
+        return timeStampsToDates(timeStampArray);
+    }
+
+    @Override
+    public MDLongArray readTimeStampMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+                    {
+                        @Override
+                        public MDLongArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                            baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                            return longReader.readLongMDArray(dataSetId, registry);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public MDLongArray readTimeStampMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readTimeStampMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
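+
+    // Offset arithmetic sketch (illustrative): block number { 2, 3 } with
+    // block dimensions { 10, 10 } starts at offset { 20, 30 }:
+    //
+    //     readTimeStampMDArrayBlock(path, new int[] { 10, 10 }, new long[] { 2, 3 });
+    //     // delegates to readTimeStampMDArrayBlockWithOffset(path, {10,10}, {20,30})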
+
+    @Override
+    public MDLongArray readTimeStampMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+            {
+                @Override
+                public MDLongArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockDimensions,
+                                    registry);
+                    final long[] dataBlock = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, dataBlock);
+                    return new MDLongArray(dataBlock, blockDimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDLongArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath, final MDLongArray array,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    baseReader.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<long[]>> getTimeStampArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<long[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<long[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<long[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<long[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final long[] block =
+                                        readTimeStampArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5DataBlock<long[]>(block, index.getAndIncIndex(),
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Date[] readDateArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        final long[] timestampArray = readTimeStampArrayBlock(objectPath, blockSize, blockNumber);
+        return timeStampsToDates(timestampArray);
+    }
+
+    @Override
+    public Date[] readDateArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        final long[] timestampArray =
+                readTimeStampArrayBlockWithOffset(objectPath, blockSize, offset);
+        return timeStampsToDates(timestampArray);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<Date[]>> getDateArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<Date[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<Date[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<Date[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<Date[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final long[] block =
+                                        readTimeStampArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5DataBlock<Date[]>(timeStampsToDates(block),
+                                        index.getAndIncIndex(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getTimeStampMDArrayNaturalBlocks(
+            final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDLongArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDLongArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDLongArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDLongArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDLongArray data =
+                                        readTimeStampMDArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDLongArray>(data,
+                                        index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
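+    /** Converts epoch-millisecond time stamps into the corresponding {@link Date} objects. */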
+    private static Date[] timeStampsToDates(final long[] timeStampArray)
+    {
+        assert timeStampArray != null;
+
+        final Date[] dateArray = new Date[timeStampArray.length];
+        for (int i = 0; i < dateArray.length; ++i)
+        {
+            dateArray[i] = new Date(timeStampArray[i]);
+        }
+        return dateArray;
+    }
+
+    private static MDArray<Date> timeStampsToDates(final MDLongArray timeStampArray)
+    {
+        assert timeStampArray != null;
+
+        final long[] timeStampsFlat = timeStampArray.getAsFlatArray();
+        final MDArray<Date> dateArray = new MDArray<Date>(Date.class, timeStampArray.dimensions());
+        final Date[] datesFlat = dateArray.getAsFlatArray();
+        for (int i = 0; i < datesFlat.length; ++i)
+        {
+            datesFlat[i] = new Date(timeStampsFlat[i]);
+        }
+        return dateArray;
+    }
+
+    @Override
+    public MDArray<Date> readDateMDArray(String objectPath)
+    {
+        final MDLongArray timeStampArray = readTimeStampMDArray(objectPath);
+        return timeStampsToDates(timeStampArray);
+    }
+
+    @Override
+    public MDArray<Date> readDateMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        final MDLongArray timeStampArray =
+                readTimeStampMDArrayBlock(objectPath, blockDimensions, blockNumber);
+        return timeStampsToDates(timeStampArray);
+    }
+
+    @Override
+    public MDArray<Date> readDateMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        final MDLongArray timeStampArray =
+                readTimeStampMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+        return timeStampsToDates(timeStampArray);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<Date>>> getDateMDArrayNaturalBlocks(
+            final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDArray<Date>>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDArray<Date>>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDArray<Date>>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDArray<Date>> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDLongArray data =
+                                        readTimeStampMDArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDArray<Date>>(timeStampsToDates(data),
+                                        index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+}
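
A rough usage sketch of the date/time reader above may be helpful. It assumes the
library's public facade (HDF5Factory.openForReading and the time() accessor on
IHDF5Reader), which this hunk does not itself show; the file and data set names are
purely illustrative.

    import java.io.File;
    import java.util.Date;

    import ch.systemsx.cisd.hdf5.HDF5DataBlock;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;

    public class DateTimeReadExample
    {
        public static void main(String[] args)
        {
            // Open the file read-only; close() releases the underlying HDF5 handles.
            final IHDF5Reader reader = HDF5Factory.openForReading(new File("example.h5"));
            try
            {
                // A scalar timestamp (INT64 tagged with the TIMESTAMP type variant),
                // converted back to java.util.Date by readDate().
                final Date lastRun = reader.time().readDate("/lastRun");
                System.out.println("last run: " + lastRun);

                // Block-wise iteration over a large 1D data set; the natural block
                // size is the chunk size the data set was created with.
                for (HDF5DataBlock<long[]> block : reader.time()
                        .getTimeStampArrayNaturalBlocks("/timestamps"))
                {
                    System.out.println("block " + block.getIndex() + " at offset "
                            + block.getOffset() + ": " + block.getData().length + " stamps");
                }
            } finally
            {
                reader.close();
            }
        }
    }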
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DateTimeWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DateTimeWriter.java
new file mode 100644
index 0000000..22bf4e0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DateTimeWriter.java
@@ -0,0 +1,683 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;
+
+import java.util.Date;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * Implementation of {@link IHDF5DateTimeWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5DateTimeWriter extends HDF5DateTimeReader implements IHDF5DateTimeWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5DateTimeWriter(HDF5BaseWriter baseWriter, HDF5LongReader longReader)
+    {
+        super(baseWriter, longReader);
+
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final long timeStamp)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
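+                    // Attach the attribute either through an explicitly created rank-1
+                    // data space, or (dataSpaceId == -1) let the base writer fall back to
+                    // a scalar data space.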
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { 1 }, registry);
+                        baseWriter
+                                .setAttribute(
+                                        objectPath,
+                                        name,
+                                        HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                                        H5T_STD_I64LE, H5T_NATIVE_INT64, dataSpaceId, new long[]
+                                            { timeStamp }, registry);
+                    } else
+                    {
+                        baseWriter
+                                .setAttribute(
+                                        objectPath,
+                                        name,
+                                        HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                                        H5T_STD_I64LE, H5T_NATIVE_INT64, -1, new long[]
+                                            { timeStamp }, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setAttr(String objectPath, String name, Date date)
+    {
+        setAttr(objectPath, name, date.getTime());
+    }
+
+    @Override
+    public void setArrayAttr(String objectPath, String name, Date[] dates)
+    {
+        setArrayAttr(objectPath, name, datesToTimeStamps(dates));
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name, final long[] timeStamps)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert timeStamps != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { timeStamps.length }, registry);
+                        baseWriter
+                                .setAttribute(
+                                        objectPath,
+                                        name,
+                                        HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                                        H5T_STD_I64LE, H5T_NATIVE_INT64, dataSpaceId, timeStamps,
+                                        registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT64, timeStamps.length,
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I64LE, timeStamps.length,
+                                        registry);
+                        baseWriter
+                                .setAttribute(
+                                        objectPath,
+                                        name,
+                                        HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                                        storageTypeId, memoryTypeId, -1, timeStamps, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDLongArray timeStamps)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert timeStamps != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(timeStamps.longDimensions(),
+                                        registry);
+                        baseWriter
+                                .setAttribute(
+                                        objectPath,
+                                        name,
+                                        HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                                        H5T_STD_I64LE, H5T_NATIVE_INT64, dataSpaceId,
+                                        timeStamps.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT64, timeStamps.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I64LE, timeStamps.dimensions(),
+                                        registry);
+                        baseWriter
+                                .setAttribute(
+                                        objectPath,
+                                        name,
+                                        HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                                        storageTypeId, memoryTypeId, -1, timeStamps.getAsFlatArray(),
+                                        registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(String objectPath, String name, MDArray<Date> value)
+    {
+        setMDArrayAttr(objectPath, name, datesToTimeStamps(value));
+    }
+
+    @Override
+    public void write(final String objectPath, final long timeStamp)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.writeScalar(objectPath, H5T_STD_I64LE, H5T_NATIVE_INT64,
+                                    HDFNativeData.longToByte(timeStamp), true, true, registry);
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeScalarRunnable);
+    }
+
+    @Override
+    public void createArray(String objectPath, int size)
+    {
+        createArray(objectPath, size, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int dataSetId;
+                    if (features.requiresChunking())
+                    {
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath, H5T_STD_I64LE, features,
+                                        new long[]
+                                            { 0 }, new long[]
+                                            { size }, longBytes, registry);
+                    } else
+                    {
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath, H5T_STD_I64LE, features,
+                                        new long[]
+                                            { size }, null, longBytes, registry);
+                    }
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long length, final int blockSize,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert length >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int dataSetId =
+                            baseWriter.createDataSet(objectPath, H5T_STD_I64LE, features,
+                                    new long[]
+                                        { length }, new long[]
+                                        { blockSize }, longBytes, registry);
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final long[] timeStamps)
+    {
+        writeArray(objectPath, timeStamps, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final long[] timeStamps,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert timeStamps != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_I64LE, new long[]
+                                { timeStamps.length }, longBytes, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, timeStamps);
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
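+    // Note: blockNumber is given in units of data.length, i.e. this writes the slab starting
+    // at offset data.length * blockNumber and extends the data set to cover it if necessary.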
+    @Override
+    public void writeArrayBlock(final String objectPath, final long[] data, final long blockNumber)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = new long[]
+                        { data.length };
+                    final long[] slabStartOrNull = new long[]
+                        { data.length * blockNumber };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { data.length * (blockNumber + 1) }, -1, registry);
+                    baseWriter.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    baseWriter.checkIsTimeStamp(objectPath, dataSetId, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void write(final String objectPath, final Date date)
+    {
+        write(objectPath, date.getTime());
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final Date[] dates)
+    {
+        writeArray(objectPath, datesToTimeStamps(dates));
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final Date[] dates,
+            final HDF5GenericStorageFeatures features)
+    {
+        writeArray(objectPath, datesToTimeStamps(dates), features);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDLongArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath,
+                                    features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE,
+                                    data.longDimensions(), 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId;
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath,
+                                        features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE,
+                                        features, nullDimensions, MDArray.toLong(dimensions), 8,
+                                        registry);
+                    } else
+                    {
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath,
+                                        features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE,
+                                        features, MDArray.toLong(dimensions), null, 8, registry);
+                    }
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.createDataSet(objectPath,
+                                    features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, features,
+                                    dimensions, MDArray.toLong(blockDimensions), 8, registry);
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDLongArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    baseWriter.setTypeVariant(dataSetId,
+                            HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<Date> data,
+            final HDF5IntStorageFeatures features)
+    {
+        writeMDArray(objectPath, datesToTimeStamps(data), features);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDArray<Date> data,
+            final long[] blockNumber)
+    {
+        writeMDArrayBlock(objectPath, datesToTimeStamps(data), blockNumber);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDArray<Date> data, long[] offset)
+    {
+        writeMDArrayBlockWithOffset(objectPath, datesToTimeStamps(data), offset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDArray<Date> data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        writeMDArrayBlockWithOffset(objectPath, datesToTimeStamps(data), blockDimensions, offset,
+                memoryOffset);
+    }
+
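+    /** Converts {@link Date} objects into epoch-millisecond time stamps. */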
+    private static long[] datesToTimeStamps(Date[] dates)
+    {
+        assert dates != null;
+
+        final long[] timestamps = new long[dates.length];
+        for (int i = 0; i < timestamps.length; ++i)
+        {
+            timestamps[i] = dates[i].getTime();
+        }
+        return timestamps;
+    }
+
+    private static MDLongArray datesToTimeStamps(MDArray<Date> dates)
+    {
+        assert dates != null;
+
+        final Date[] datesFlat = dates.getAsFlatArray();
+        final MDLongArray timestamps = new MDLongArray(dates.dimensions());
+        final long[] timestampsFlat = timestamps.getAsFlatArray();
+        for (int i = 0; i < timestampsFlat.length; ++i)
+        {
+            timestampsFlat[i] = datesFlat[i].getTime();
+        }
+        return timestamps;
+    }
+
+}
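
Correspondingly, a minimal writer sketch under the same assumptions (HDF5Factory.open
and the time() accessor on IHDF5Writer are the public facade; paths and values are
illustrative only):

    import java.io.File;
    import java.util.Date;

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class DateTimeWriteExample
    {
        public static void main(String[] args)
        {
            // Open (or create) the file for writing.
            final IHDF5Writer writer = HDF5Factory.open(new File("example.h5"));
            try
            {
                // A scalar java.util.Date, stored as epoch milliseconds (INT64) and
                // tagged with the TIMESTAMP type variant.
                writer.time().write("/lastRun", new Date());

                // An array of raw epoch-millisecond time stamps.
                writer.time().writeArray("/timestamps", new long[] { 0L, 1000L, 2000L });

                // A Date attribute attached to the data set written above.
                writer.time().setAttr("/timestamps", "created", new Date());
            } finally
            {
                writer.close();
            }
        }
    }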
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DoubleReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DoubleReader.java
new file mode 100644
index 0000000..06973e8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DoubleReader.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_DOUBLE;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5DoubleReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5DoubleReader implements IHDF5DoubleReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5DoubleReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For Unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public double getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Double> getAttributeRunnable = new ICallableWithCleanUp<Double>()
+            {
+                @Override
+                public Double call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final double[] data =
+                            baseReader.h5.readAttributeAsDoubleArray(attributeId, H5T_NATIVE_DOUBLE, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public double[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<double[]> getAttributeRunnable =
+                new ICallableWithCleanUp<double[]>()
+                    {
+                        @Override
+                        public double[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getDoubleArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDDoubleArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDDoubleArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDDoubleArray>()
+                    {
+                        @Override
+                        public MDDoubleArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getDoubleMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public double[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDDoubleArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public double read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Double> readCallable = new ICallableWithCleanUp<Double>()
+            {
+                @Override
+                public Double call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final double[] data = new double[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_DOUBLE, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public double[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<double[]> readCallable = new ICallableWithCleanUp<double[]>()
+            {
+                @Override
+                public double[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readDoubleArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private double[] readDoubleArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final double[] data = new double[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_DOUBLE, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readDoubleArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
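+    /**
+     * Fallback for scalar data sets whose element type is an HDF5 array type: reads the
+     * array-typed scalar and returns its elements as a flat <code>double[]</code>.
+     */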
+    private double[] readDoubleArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final double[] data = new double[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_DOUBLE, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDDoubleArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_DOUBLE, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDDoubleArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_DOUBLE, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public double[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public double[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<double[]> readCallable = new ICallableWithCleanUp<double[]>()
+            {
+                @Override
+                public double[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final double[] data = new double[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_DOUBLE, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public double[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDDoubleArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public double[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDDoubleArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public double[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDDoubleArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDDoubleArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDDoubleArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDDoubleArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDDoubleArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDDoubleArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDDoubleArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDDoubleArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDDoubleArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
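+
+    // Slicing sketch: for a 3-d data set, boundIndices = new long[] { 5, -1, -1 }
+    // (with -1 marking a free index) binds the first index to 5 and returns the
+    // 2-d slice [5, :, :] as an MDDoubleArray of rank 2.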
+
+    @Override
+    public MDDoubleArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDDoubleArray> readCallable = new ICallableWithCleanUp<MDDoubleArray>()
+            {
+                @Override
+                public MDDoubleArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readDoubleMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDDoubleArray readDoubleMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final double[] data = new double[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_DOUBLE, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDDoubleArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readDoubleMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDDoubleArray readDoubleMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_DOUBLE, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final double[] data = new double[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDDoubleArray(data, arrayDimensions);
+        } else
+        {
+            final double[] data =
+                    new double[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDDoubleArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDDoubleArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDDoubleArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDDoubleArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
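+
+    // Block arithmetic sketch: blockDimensions = { 10, 20 } with blockNumber = { 2, 1 }
+    // yields offset = { 20, 20 }, i.e. the block covering rows [20, 30) and columns [20, 40).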
+
+    @Override
+    public MDDoubleArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDDoubleArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDDoubleArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDDoubleArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDDoubleArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDDoubleArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDDoubleArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDDoubleArray> readCallable = new ICallableWithCleanUp<MDDoubleArray>()
+            {
+                @Override
+                public MDDoubleArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final double[] dataBlock = new double[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_DOUBLE,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDDoubleArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDDoubleArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // We do not support block-wise reading of array types, check
+        // that we do not have to and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final double[] dataBlock =
+                new double[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_DOUBLE, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDDoubleArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<double[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<double[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<double[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<double[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<double[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final double[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<double[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
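+
+    // Iteration sketch (reader obtained as in the sketch above; the path and the
+    // process() helper are placeholders):
+    //
+    //     for (HDF5DataBlock<double[]> block : reader.float64().getArrayNaturalBlocks("/mydata"))
+    //     {
+    //         process(block.getData(), block.getOffset());
+    //     }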
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDDoubleArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDDoubleArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDDoubleArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDDoubleArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDDoubleArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDDoubleArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDDoubleArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    double[] getDoubleArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_DOUBLE, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_DOUBLE;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final double[] data =
+                baseReader.h5.readAttributeAsDoubleArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDDoubleArray getDoubleMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_DOUBLE,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_DOUBLE;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final double[] data =
+                    baseReader.h5.readAttributeAsDoubleArray(attributeId,
+                            memoryTypeId, len);
+            return new MDDoubleArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5DoubleWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5DoubleWriter.java
new file mode 100644
index 0000000..7e03564
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5DoubleWriter.java
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_DOUBLE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F64LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5DoubleWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5DoubleWriter extends HDF5DoubleReader implements IHDF5DoubleWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5DoubleWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final double value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_IEEE_F64LE,
+                                        H5T_NATIVE_DOUBLE, dataSpaceId, new double[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_IEEE_F64LE,
+                                        H5T_NATIVE_DOUBLE, -1, new double[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
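+
+    // Attribute sketch (object path and attribute name are placeholders; assumes a
+    // writer opened via HDF5Factory.open and the float64() accessor):
+    //
+    //     writer.float64().setAttr("/mydata", "temperature", 21.5);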
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final double[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_IEEE_F64LE, H5T_NATIVE_DOUBLE,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_DOUBLE, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_IEEE_F64LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDDoubleArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_IEEE_F64LE, H5T_NATIVE_DOUBLE,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_DOUBLE, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_IEEE_F64LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final double[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDDoubleArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final double value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_IEEE_F64LE, H5T_NATIVE_DOUBLE, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final double[] data)
+    {
+        writeArray(objectPath, data, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final double[] data,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                H5T_IEEE_F64LE, new long[]
+                                { data.length }, 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
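+
+    // Write sketch ("example.h5" and "/mydata" are placeholders; assumes HDF5Factory):
+    //
+    //     final IHDF5Writer writer = HDF5Factory.open("example.h5");
+    //     writer.float64().writeArray("/mydata", new double[] { 1.0, 2.0, 3.0 });
+    //     writer.close();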
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F64LE, 
+                            features, new long[] { 0 }, new long[] { size }, 8, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F64LE, 
+                            features, new long[] { size }, null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_IEEE_F64LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final double[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
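+
+    // Block-wise writing assumes a data set created block-wise first, e.g. (sizes are
+    // placeholders):
+    //
+    //     writer.float64().createArray("/mydata", 0, 1000);      // extendable, block size 1000
+    //     writer.float64().writeArrayBlock("/mydata", block, 0); // block.length == 1000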
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final double[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_DOUBLE, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final double[][] data)
+    {
+        writeMatrix(objectPath, data, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final double[][] data, 
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDDoubleArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final double[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDDoubleArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final double[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDDoubleArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final double[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDDoubleArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDDoubleArray data)
+    {
+        writeMDArray(objectPath, data, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDDoubleArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDDoubleArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDDoubleArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDDoubleArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDDoubleArray data,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_IEEE_F64LE, 
+                                    data.longDimensions(), 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F64LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 8, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F64LE, 
+                                features, MDArray.toLong(dimensions), null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_IEEE_F64LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDDoubleArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDDoubleArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDDoubleArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDDoubleArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_DOUBLE, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDDoubleArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDDoubleArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
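+    // In the method below, memoryOffset selects where inside the in-memory array 'data' the
+    // block starts, offset positions the block inside the data set on disk, and
+    // blockDimensions gives the extent that is copied between the two hyperslabs.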
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDDoubleArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_DOUBLE, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5EnumReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumReader.java
new file mode 100644
index 0000000..ab565f3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumReader.java
@@ -0,0 +1,795 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.ENUM_PREFIX;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createDataTypePath;
+
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.HDF5EnumerationType.EnumStorageForm;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5EnumReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5EnumReader implements IHDF5EnumReader
+{
+    protected final HDF5BaseReader baseReader;
+
+    HDF5EnumReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    @Override
+    public HDF5EnumerationType getType(final String name)
+    {
+        baseReader.checkOpen();
+        final String dataTypePath =
+                createDataTypePath(ENUM_PREFIX, baseReader.houseKeepingNameSuffix, name);
+        final int storageDataTypeId = baseReader.getDataTypeId(dataTypePath);
+        return baseReader.getEnumTypeForStorageDataType(name, storageDataTypeId, true, null, null,
+                baseReader.fileRegistry);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final String name, final String[] values)
+            throws HDF5JavaException
+    {
+        return getType(name, values, true);
+    }
+
+    @Override
+    public <T extends Enum<?>> HDF5EnumerationType getType(final Class<T> enumClass)
+            throws HDF5JavaException
+    {
+        return getType(enumClass.getSimpleName(), ReflectionUtils.getEnumOptions(enumClass), true);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final Class<? extends Enum<?>> enumClass, final boolean check)
+            throws HDF5JavaException
+    {
+        return getType(enumClass.getSimpleName(), ReflectionUtils.getEnumOptions(enumClass), check);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final String name, final Class<? extends Enum<?>> enumClass)
+            throws HDF5JavaException
+    {
+        return getType(name, ReflectionUtils.getEnumOptions(enumClass), true);
+    }
+
+    @Override
+    public <T extends Enum<?>> HDF5EnumerationType getType(final String name,
+            final Class<T> enumClass, final boolean check) throws HDF5JavaException
+    {
+        return getType(name, ReflectionUtils.getEnumOptions(enumClass), check);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final String name, final String[] values, final boolean check)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5EnumerationType dataType = getType(name);
+        if (check)
+        {
+            baseReader.checkEnumValues(dataType.getStorageTypeId(), values, name);
+        }
+        return dataType;
+    }
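+
+    // When check is true, the enumeration values stored in the file are verified against the
+    // values passed in (a mismatch is signalled by an exception); with check == false the
+    // stored type is returned unverified.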
+
+    @Override
+    public HDF5EnumerationType getType(EnumerationType genericType) throws HDF5JavaException
+    {
+        return getType(genericType, true);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(EnumerationType genericType, boolean check)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5EnumerationType dataType = getType(genericType.tryGetName());
+        if (check)
+        {
+            baseReader.checkEnumValues(dataType.getStorageTypeId(), genericType.getValueArray(),
+                    genericType.tryGetName());
+        }
+        return dataType;
+    }
+
+    @Override
+    public HDF5EnumerationType getDataSetType(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5EnumerationType> readEnumTypeCallable =
+                new ICallableWithCleanUp<HDF5EnumerationType>()
+                    {
+                        @Override
+                        public HDF5EnumerationType call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, dataSetPath,
+                                            registry);
+                            return getEnumTypeForDataSetId(dataSetId, dataSetPath,
+                                    baseReader.isScaledEnum(dataSetId, registry), registry);
+                        }
+                    };
+        return baseReader.runner.call(readEnumTypeCallable);
+    }
+
+    private HDF5EnumerationType getEnumTypeForDataSetId(final int objectId,
+            final String objectName, final boolean scaledEnum, final ICleanUpRegistry registry)
+    {
+        if (scaledEnum)
+        {
+            final String enumTypeName =
+                    baseReader.getStringAttribute(objectId, objectName, HDF5Utils
+                            .getEnumTypeNameAttributeName(baseReader.houseKeepingNameSuffix),
+                            false, registry);
+            return getType(enumTypeName);
+        } else
+        {
+            final int storageDataTypeId =
+                    baseReader.h5.getDataTypeForDataSet(objectId, baseReader.fileRegistry);
+            return baseReader.getEnumTypeForStorageDataType(null, storageDataTypeId, true,
+                    objectName, null, baseReader.fileRegistry);
+        }
+    }
+
+    @Override
+    public HDF5EnumerationType getAttributeType(final String dataSetPath, final String attributeName)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5EnumerationType> readEnumTypeCallable =
+                new ICallableWithCleanUp<HDF5EnumerationType>()
+                    {
+                        @Override
+                        public HDF5EnumerationType call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, dataSetPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(dataSetId, attributeName, registry);
+                            final int storageDataTypeId =
+                                    baseReader.h5.getDataTypeForAttribute(attributeId,
+                                            baseReader.fileRegistry);
+                            return baseReader.getEnumTypeForStorageDataType(null,
+                                    storageDataTypeId, true, dataSetPath, attributeName,
+                                    baseReader.fileRegistry);
+                        }
+                    };
+        return baseReader.runner.call(readEnumTypeCallable);
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public String getAttrAsString(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final int storageDataTypeId =
+                            baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataType(storageDataTypeId, registry);
+
+                    final int enumDataTypeId =
+                            baseReader.getEnumDataTypeId(storageDataTypeId, registry);
+                    final int size = baseReader.h5.getDataTypeSize(enumDataTypeId);
+                    final byte[] data =
+                            baseReader.h5.readAttributeAsByteArray(attributeId, nativeDataTypeId,
+                                    size);
+                    final String value =
+                            baseReader.h5.getNameForEnumOrCompoundMemberIndex(enumDataTypeId,
+                                    EnumerationType.fromStorageForm(data, 0, size));
+                    if (value == null)
+                    {
+                        throw new HDF5JavaException("Attribute " + attributeName + " of object "
+                                + objectPath + " needs to be an Enumeration.");
+                    }
+                    return value;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
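+
+    // Usage sketch (hypothetical names): read an enum attribute as its string literal.
+    //   String validity = enumReader.getAttrAsString("/plate1", "validity");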
+
+    @Override
+    public HDF5EnumerationValue getAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5EnumerationValue> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValue>()
+                    {
+                        @Override
+                        public HDF5EnumerationValue call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+                            final int storageDataTypeId =
+                                    baseReader.h5.getDataTypeForAttribute(attributeId,
+                                            baseReader.fileRegistry);
+                            final int enumTypeId =
+                                    baseReader.getEnumDataTypeId(storageDataTypeId,
+                                            baseReader.fileRegistry);
+                            final HDF5EnumerationType enumType =
+                                    baseReader.getEnumTypeForStorageDataType(null, enumTypeId,
+                                            true, objectPath, attributeName,
+                                            baseReader.fileRegistry);
+                            final int nativeDataTypeId;
+                            if (storageDataTypeId != enumTypeId) // Array data type
+                            {
+                                nativeDataTypeId =
+                                        baseReader.h5
+                                                .getNativeDataType(storageDataTypeId, registry);
+                            } else
+                            {
+                                nativeDataTypeId = enumType.getNativeTypeId();
+                            }
+                            final int enumOrdinal =
+                                    baseReader.getEnumOrdinal(attributeId, nativeDataTypeId,
+                                            enumType);
+                            return new HDF5EnumerationValue(enumType, enumOrdinal);
+                        }
+                    };
+
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public <T extends Enum<T>> T getAttr(String objectPath, String attributeName, Class<T> enumClass)
+            throws HDF5JavaException
+    {
+        final String value = getAttrAsString(objectPath, attributeName);
+        try
+        {
+            return Enum.valueOf(enumClass, value);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException("The Java enum class " + enumClass.getCanonicalName()
+                    + " has no value '" + value + "'.");
+        }
+    }
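+
+    // Usage sketch (assumes a matching hypothetical Java enum
+    // "enum Validity { VALID, INVALID }"):
+    //   Validity v = enumReader.getAttr("/plate1", "validity", Validity.class);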
+
+    @Override
+    public HDF5EnumerationValueArray getArrayAttr(final String objectPath,
+            final String attributeName) throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5EnumerationValueArray> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValueArray>()
+                    {
+                        @Override
+                        public HDF5EnumerationValueArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+                            return baseReader.getEnumValueArray(attributeId, objectPath,
+                                    attributeName, registry);
+                        }
+
+                    };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    public String[] getEnumArrayAttributeAsString(final String objectPath,
+            final String attributeName) throws HDF5JavaException
+    {
+        final HDF5EnumerationValueArray array = getArrayAttr(objectPath, attributeName);
+        return array.toStringArray();
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray getMDArrayAttr(final String objectPath,
+            final String attributeName) throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5EnumerationValueMDArray> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValueMDArray>()
+                    {
+                        @Override
+                        public HDF5EnumerationValueMDArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+                            return baseReader.getEnumValueMDArray(attributeId, objectPath,
+                                    attributeName, registry);
+                        }
+
+                    };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public String readAsString(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int storageDataTypeId =
+                            baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataType(storageDataTypeId, registry);
+                    final int size = baseReader.h5.getDataTypeSize(nativeDataTypeId);
+                    final byte[] data = new byte[size];
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, data);
+                    final String value =
+                            baseReader.h5.getNameForEnumOrCompoundMemberIndex(storageDataTypeId,
+                                    EnumerationType.fromStorageForm(data));
+                    if (value == null)
+                    {
+                        throw new HDF5JavaException(objectPath + " needs to be an Enumeration.");
+                    }
+                    return value;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
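+
+    // Usage sketch (hypothetical path): read a scalar enum data set as a string.
+    //   String state = enumReader.readAsString("/experiment/state");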
+
+    @Override
+    public <T extends Enum<T>> T read(String objectPath, Class<T> enumClass)
+            throws HDF5JavaException
+    {
+        final String value = readAsString(objectPath);
+        try
+        {
+            return Enum.valueOf(enumClass, value);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException("The Java enum class " + enumClass.getCanonicalName()
+                    + " has no value '" + value + "'.");
+        }
+    }
+
+    @Override
+    public HDF5EnumerationValue read(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5EnumerationValue> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValue>()
+                    {
+                        @Override
+                        public HDF5EnumerationValue call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5EnumerationType enumType =
+                                    getEnumTypeForDataSetId(dataSetId, objectPath, false, registry);
+                            return readEnumValue(dataSetId, enumType);
+                        }
+                    };
+
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public HDF5EnumerationValue read(final String objectPath, final HDF5EnumerationType enumType)
+            throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert enumType != null;
+
+        baseReader.checkOpen();
+        enumType.check(baseReader.fileId);
+        final ICallableWithCleanUp<HDF5EnumerationValue> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValue>()
+                    {
+                        @Override
+                        public HDF5EnumerationValue call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            return readEnumValue(dataSetId, enumType);
+                        }
+                    };
+
+        return baseReader.runner.call(readRunnable);
+    }
+
+    private HDF5EnumerationValue readEnumValue(final int dataSetId,
+            final HDF5EnumerationType enumType)
+    {
+        final byte[] data = new byte[enumType.getStorageForm().getStorageSize()];
+        baseReader.h5.readDataSet(dataSetId, enumType.getNativeTypeId(), data);
+        return new HDF5EnumerationValue(enumType, EnumerationType.fromStorageForm(data));
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readArray(final String objectPath,
+            final HDF5EnumerationType enumTypeOrNull) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        if (enumTypeOrNull != null)
+        {
+            enumTypeOrNull.check(baseReader.fileId);
+        }
+        final ICallableWithCleanUp<HDF5EnumerationValueArray> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValueArray>()
+                    {
+                        @Override
+                        public HDF5EnumerationValueArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final long[] dimensions =
+                                    baseReader.h5.getDataDimensions(dataSetId, registry);
+                            final boolean scaledEnum = baseReader.isScaledEnum(dataSetId, registry);
+                            final HDF5EnumerationType actualEnumType =
+                                    (enumTypeOrNull == null) ? getEnumTypeForDataSetId(dataSetId,
+                                            objectPath, scaledEnum, registry) : enumTypeOrNull;
+                            final int arraySize = HDF5Utils.getOneDimensionalArraySize(dimensions);
+                            final EnumStorageForm storageForm = actualEnumType.getStorageForm();
+                            final byte[] data = new byte[arraySize * storageForm.getStorageSize()];
+                            if (scaledEnum)
+                            {
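+                                // Scaled enums were written with an integer native type,
+                                // so they have to be read back with that type as well.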
+                                baseReader.h5.readDataSet(dataSetId, actualEnumType
+                                        .getStorageForm().getIntNativeTypeId(), data);
+                            } else
+                            {
+                                baseReader.h5.readDataSet(dataSetId,
+                                        actualEnumType.getNativeTypeId(), data);
+                            }
+                            return new HDF5EnumerationValueArray(actualEnumType,
+                                    EnumerationType.fromStorageForm(data, storageForm));
+                        }
+                    };
+
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readArray(final String objectPath) throws HDF5JavaException
+    {
+        return readArray(objectPath, (HDF5EnumerationType) null);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationType enumTypeOrNull, final int blockSize, final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        if (enumTypeOrNull != null)
+        {
+            enumTypeOrNull.check(baseReader.fileId);
+        }
+        final ICallableWithCleanUp<HDF5EnumerationValueArray> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValueArray>()
+                    {
+                        @Override
+                        public HDF5EnumerationValueArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offset, blockSize,
+                                            registry);
+                            final boolean scaledEnum = baseReader.isScaledEnum(dataSetId, registry);
+                            final HDF5EnumerationType actualEnumType =
+                                    (enumTypeOrNull == null) ? getEnumTypeForDataSetId(dataSetId,
+                                            objectPath, scaledEnum, registry) : enumTypeOrNull;
+                            final byte[] data =
+                                    new byte[spaceParams.blockSize
+                                            * actualEnumType.getStorageForm().getStorageSize()];
+                            if (scaledEnum)
+                            {
+                                baseReader.h5.readDataSet(dataSetId, actualEnumType
+                                        .getStorageForm().getIntNativeTypeId(),
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            } else
+                            {
+                                baseReader.h5.readDataSet(dataSetId,
+                                        actualEnumType.getNativeTypeId(),
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            }
+                            return new HDF5EnumerationValueArray(actualEnumType,
+                                    EnumerationType.fromStorageForm(data,
+                                            actualEnumType.getStorageForm()));
+                        }
+                    };
+
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset)
+    {
+        return readArrayBlockWithOffset(objectPath, (HDF5EnumerationType) null, blockSize, offset);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, (HDF5EnumerationType) null, blockSize,
+                blockNumber * blockSize);
+    }
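+
+    // Usage sketch (hypothetical path): read the third block of 1000 elements, i.e.
+    // elements [2000, 3000).
+    //   HDF5EnumerationValueArray block = enumReader.readArrayBlock("/states", 1000, 2);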
+
+    @Override
+    public HDF5EnumerationValueArray readArrayBlock(final String objectPath,
+            final HDF5EnumerationType enumType, final int blockSize, final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, enumType, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray readMDArray(final String objectPath)
+            throws HDF5JavaException
+    {
+        return readMDArray(objectPath, (HDF5EnumerationType) null);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray readMDArray(final String objectPath,
+            final HDF5EnumerationType enumTypeOrNull) throws HDF5JavaException
+    {
+        return readMDArrayBlockWithOffset(objectPath, enumTypeOrNull, null, null);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray readMDArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationType enumTypeOrNull, final int[] blockDimensionsOrNull,
+            final long[] offsetOrNull)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        if (enumTypeOrNull != null)
+        {
+            enumTypeOrNull.check(baseReader.fileId);
+        }
+        final ICallableWithCleanUp<HDF5EnumerationValueMDArray> readRunnable =
+                new ICallableWithCleanUp<HDF5EnumerationValueMDArray>()
+                    {
+                        @Override
+                        public HDF5EnumerationValueMDArray call(final ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final boolean scaledEnum = baseReader.isScaledEnum(dataSetId, registry);
+                            final HDF5EnumerationType actualEnumType =
+                                    (enumTypeOrNull == null) ? getEnumTypeForDataSetId(dataSetId,
+                                            objectPath, scaledEnum, registry) : enumTypeOrNull;
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offsetOrNull,
+                                            blockDimensionsOrNull, registry);
+                            final EnumStorageForm storageForm = actualEnumType.getStorageForm();
+                            final byte[] byteArr =
+                                    new byte[spaceParams.blockSize * storageForm.getStorageSize()];
+                            if (scaledEnum)
+                            {
+                                baseReader.h5
+                                        .readDataSet(dataSetId, storageForm.getIntNativeTypeId(),
+                                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                                byteArr);
+                            } else
+                            {
+                                baseReader.h5
+                                        .readDataSet(dataSetId, actualEnumType.getNativeTypeId(),
+                                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                                byteArr);
+                            }
+                            return new HDF5EnumerationValueMDArray(actualEnumType,
+                                    EnumerationType.fromStorageForm(byteArr,
+                                            spaceParams.dimensions, storageForm));
+                        }
+                    };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray readMDArrayBlock(String objectPath,
+            HDF5EnumerationType type, int[] blockDimensions, long[] blockNumber)
+            throws HDF5JavaException
+    {
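+        // The element offset of the block along each axis is
+        // blockDimensions[i] * blockNumber[i].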
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockDimensions[i] * blockNumber[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, type, blockDimensions, offset);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray readMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber) throws HDF5JavaException
+    {
+        return readMDArrayBlock(objectPath, null, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset) throws HDF5JavaException
+    {
+        return readMDArrayBlockWithOffset(objectPath, null, blockDimensions, offset);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getArrayBlocks(
+            final String objectPath, final HDF5EnumerationType enumTypeOrNull)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(objectPath));
+
+        return new Iterable<HDF5DataBlock<HDF5EnumerationValueArray>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<HDF5EnumerationValueArray>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<HDF5EnumerationValueArray>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<HDF5EnumerationValueArray> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final HDF5EnumerationValueArray block =
+                                        readArrayBlockWithOffset(objectPath, enumTypeOrNull,
+                                                index.getBlockSize(), offset);
+                                return new HDF5DataBlock<HDF5EnumerationValueArray>(block,
+                                        index.getAndIncIndex(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getArrayBlocks(final String objectPath)
+            throws HDF5JavaException
+    {
+        return getArrayBlocks(objectPath, (HDF5EnumerationType) null);
+    }
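+
+    // Usage sketch (hypothetical path; process() is a hypothetical consumer):
+    //   for (HDF5DataBlock<HDF5EnumerationValueArray> block : enumReader.getArrayBlocks("/states"))
+    //   {
+    //       process(block.getData(), block.getOffset());
+    //   }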
+
+    @Override
+    public Iterable<HDF5MDEnumBlock> getMDArrayBlocks(final String objectPath,
+            final HDF5EnumerationType enumTypeOrNull) throws HDF5JavaException
+    {
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(objectPath,
+                        DataTypeInfoOptions.MINIMAL, true));
+        return new Iterable<HDF5MDEnumBlock>()
+            {
+                @Override
+                public Iterator<HDF5MDEnumBlock> iterator()
+                {
+                    return new Iterator<HDF5MDEnumBlock>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDEnumBlock next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final HDF5EnumerationValueMDArray block =
+                                        readMDArrayBlockWithOffset(objectPath, enumTypeOrNull,
+                                                index.getBlockSize(), offset);
+                                return new HDF5MDEnumBlock(block, index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDEnumBlock> getMDArrayBlocks(String objectPath) throws HDF5JavaException
+    {
+        return getMDArrayBlocks(objectPath, null);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5EnumWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumWriter.java
new file mode 100644
index 0000000..24dafdd
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumWriter.java
@@ -0,0 +1,905 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5EnumWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5EnumWriter extends HDF5EnumReader implements IHDF5EnumWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5EnumWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Value creation
+    // /////////////////////
+
+    @Override
+    public HDF5EnumerationValue newVal(String typeName, String[] options, String value)
+    {
+        return new HDF5EnumerationValue(getType(typeName, options), value);
+    }
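+
+    // Usage sketch (hypothetical names; assumes an open IHDF5EnumWriter "enumWriter"):
+    //   HDF5EnumerationValue v = enumWriter.newVal("color",
+    //           new String[] { "RED", "GREEN", "BLUE" }, "GREEN");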
+
+    @Override
+    public HDF5EnumerationValue newVal(String typeName, String[] options, int value)
+    {
+        return new HDF5EnumerationValue(getType(typeName, options), value);
+    }
+
+    @Override
+    public HDF5EnumerationValue newVal(String typeName, String[] options, short value)
+    {
+        return new HDF5EnumerationValue(getType(typeName, options), value);
+    }
+
+    @Override
+    public HDF5EnumerationValue newVal(String typeName, String[] options, byte value)
+    {
+        return new HDF5EnumerationValue(getType(typeName, options), value);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValue newVal(String typeName, Enum<T> value)
+    {
+        return new HDF5EnumerationValue(getType(typeName, getEnumClass(value)), value);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, String[] values)
+    {
+        return new HDF5EnumerationValueArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, int[] values)
+    {
+        return new HDF5EnumerationValueArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, short[] values)
+    {
+        return new HDF5EnumerationValueArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, byte[] values)
+    {
+        return new HDF5EnumerationValueArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValueArray newArray(String typeName, Enum<T>[] values)
+    {
+        return new HDF5EnumerationValueArray(getType(typeName, getEnumClass(values)), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDArray<String> values)
+    {
+        return new HDF5EnumerationValueMDArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDIntArray values)
+    {
+        return new HDF5EnumerationValueMDArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDShortArray values)
+    {
+        return new HDF5EnumerationValueMDArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDByteArray values)
+    {
+        return new HDF5EnumerationValueMDArray(getType(typeName, options), values);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValueMDArray newMDArray(String typeName,
+            MDArray<Enum<T>> values)
+    {
+        return new HDF5EnumerationValueMDArray(getType(typeName, getEnumClass(values)), values);
+    }
+
+    @Override
+    public HDF5EnumerationValue newAnonVal(String[] options, String value)
+    {
+        return new HDF5EnumerationValue(getAnonType(options), value);
+    }
+
+    @Override
+    public HDF5EnumerationValue newAnonVal(String[] options, int value)
+    {
+        return new HDF5EnumerationValue(getAnonType(options), value);
+    }
+
+    @Override
+    public HDF5EnumerationValue newAnonVal(String[] options, short value)
+    {
+        return new HDF5EnumerationValue(getAnonType(options), value);
+    }
+
+    @Override
+    public HDF5EnumerationValue newAnonVal(String[] options, byte value)
+    {
+        return new HDF5EnumerationValue(getAnonType(options), value);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValue newAnonVal(Enum<T> value)
+    {
+        return new HDF5EnumerationValue(getAnonType(getEnumClass(value)), value);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValue newVal(Enum<T> value)
+    {
+        return new HDF5EnumerationValue(getType(getEnumClass(value)), value);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newAnonArray(String[] options, String[] values)
+    {
+        return new HDF5EnumerationValueArray(getAnonType(options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newAnonArray(String[] options, int[] values)
+    {
+        return new HDF5EnumerationValueArray(getAnonType(options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newAnonArray(String[] options, short[] values)
+    {
+        return new HDF5EnumerationValueArray(getAnonType(options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray newAnonArray(String[] options, byte[] values)
+    {
+        return new HDF5EnumerationValueArray(getAnonType(options), values);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValueArray newAnonArray(Enum<T>[] values)
+    {
+        return new HDF5EnumerationValueArray(getAnonType(getEnumClass(values)), values);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValueArray newArray(Enum<T>[] values)
+    {
+        return new HDF5EnumerationValueArray(getType(getEnumClass(values)), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDArray<String> values)
+    {
+        return new HDF5EnumerationValueMDArray(getAnonType(options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDIntArray values)
+    {
+        return new HDF5EnumerationValueMDArray(getAnonType(options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDShortArray values)
+    {
+        return new HDF5EnumerationValueMDArray(getAnonType(options), values);
+    }
+
+    @Override
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDByteArray values)
+    {
+        return new HDF5EnumerationValueMDArray(getAnonType(options), values);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValueMDArray newAnonMDArray(MDArray<Enum<T>> values)
+    {
+        return new HDF5EnumerationValueMDArray(getAnonType(getEnumClass(values)), values);
+    }
+
+    @Override
+    public <T extends Enum<T>> HDF5EnumerationValueMDArray newMDArray(MDArray<Enum<T>> values)
+    {
+        return new HDF5EnumerationValueMDArray(getType(getEnumClass(values)), values);
+    }
+
+    @SuppressWarnings("unchecked")
+    private <T extends Enum<T>> Class<Enum<T>> getEnumClass(final Enum<T> value)
+    {
+        return (Class<Enum<T>>) value.getClass();
+    }
+
+    @SuppressWarnings("unchecked")
+    private <T extends Enum<T>> Class<Enum<T>> getEnumClass(Enum<T>[] data)
+    {
+        return (Class<Enum<T>>) data.getClass().getComponentType();
+    }
+
+    @SuppressWarnings("unchecked")
+    private <T extends Enum<T>> Class<Enum<T>> getEnumClass(MDArray<Enum<T>> data)
+    {
+        return (Class<Enum<T>>) data.getAsFlatArray().getClass().getComponentType();
+    }
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    @Override
+    public HDF5EnumerationType getAnonType(String[] values) throws HDF5JavaException
+    {
+        return getType(null, values, false);
+    }
+
+    @Override
+    public HDF5EnumerationType getAnonType(final Class<? extends Enum<?>> enumClass)
+            throws HDF5JavaException
+    {
+        return getType(null, ReflectionUtils.getEnumOptions(enumClass));
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final String name, final String[] values)
+            throws HDF5JavaException
+    {
+        return getType(name, values, true);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final String nameOrNull, final String[] values,
+            final boolean check) throws HDF5JavaException
+    {
+        baseWriter.checkOpen();
+        final int storageDataTypeId =
+                getOrCreateEnumDataType(nameOrNull, values, baseWriter.keepDataSetIfExists, check);
+        final int nativeDataTypeId =
+                baseWriter.h5.getNativeDataType(storageDataTypeId, baseWriter.fileRegistry);
+        return new HDF5EnumerationType(baseWriter.fileId, storageDataTypeId, nativeDataTypeId,
+                (nameOrNull == null) ? "__anonymous__" : nameOrNull, values, baseWriter);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(EnumerationType genericType, boolean check)
+            throws HDF5JavaException
+    {
+        baseWriter.checkOpen();
+        final int storageDataTypeId =
+                getOrCreateEnumDataType(genericType.tryGetName(), genericType.getValueArray(),
+                        baseWriter.keepDataSetIfExists, check);
+        final int nativeDataTypeId =
+                baseWriter.h5.getNativeDataType(storageDataTypeId, baseWriter.fileRegistry);
+        return new HDF5EnumerationType(baseWriter.fileId, storageDataTypeId, nativeDataTypeId,
+                (genericType.tryGetName() == null) ? new EnumerationType("__anonymous__",
+                        genericType.getValueArray()) : genericType, baseWriter);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final String name, final Class<? extends Enum<?>> enumClass)
+            throws HDF5JavaException
+    {
+        return getType(name, ReflectionUtils.getEnumOptions(enumClass), true);
+    }
+
+    @Override
+    public <T extends Enum<?>> HDF5EnumerationType getType(final String name,
+            final Class<T> enumClass, final boolean check) throws HDF5JavaException
+    {
+        return getType(name, ReflectionUtils.getEnumOptions(enumClass), check);
+    }
+
+    @Override
+    public <T extends Enum<?>> HDF5EnumerationType getType(final Class<T> enumClass)
+            throws HDF5JavaException
+    {
+        return getType(enumClass.getSimpleName(), ReflectionUtils.getEnumOptions(enumClass), true);
+    }
+
+    @Override
+    public HDF5EnumerationType getType(final Class<? extends Enum<?>> enumClass, final boolean check)
+            throws HDF5JavaException
+    {
+        return getType(enumClass.getSimpleName(), ReflectionUtils.getEnumOptions(enumClass), check);
+    }
+
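+    // Resolves the storage data type id for the given name and values: an existing
+    // committed type is reused when committed types take preference (optionally
+    // verifying its values); otherwise a fresh type is created and, if it differs
+    // from a previously committed type of the same name, the old type is moved out
+    // of the way before the new one is committed under that name.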
+    private int getOrCreateEnumDataType(final String dataTypeNameOrNull, final String[] values,
+            boolean committedDataTypeHasPreference, boolean checkIfExists)
+    {
+        final String dataTypePathOrNull =
+                (dataTypeNameOrNull == null) ? null : HDF5Utils.createDataTypePath(
+                        HDF5Utils.ENUM_PREFIX, baseWriter.houseKeepingNameSuffix,
+                        dataTypeNameOrNull);
+        final int committedStorageDataTypeId =
+                (dataTypePathOrNull == null) ? -1 : baseWriter.getDataTypeId(dataTypePathOrNull);
+        final boolean typeExists = (committedStorageDataTypeId >= 0);
+        int storageDataTypeId = committedStorageDataTypeId;
+        final boolean commitType;
+        if ((typeExists == false) || (committedDataTypeHasPreference == false))
+        {
+            storageDataTypeId = baseWriter.h5.createDataTypeEnum(values, baseWriter.fileRegistry);
+            final boolean typesAreEqual =
+                    typeExists
+                            && baseWriter.h5.dataTypesAreEqual(committedStorageDataTypeId,
+                                    storageDataTypeId);
+            commitType =
+                    (dataTypeNameOrNull != null)
+                            && ((typeExists == false) || (typesAreEqual == false));
+            if (typeExists && commitType)
+            {
+                final String replacementDataTypePath =
+                        baseWriter.moveLinkOutOfTheWay(dataTypePathOrNull);
+                baseWriter.renameNamedDataType(dataTypePathOrNull, replacementDataTypePath);
+            }
+            if (typesAreEqual)
+            {
+                storageDataTypeId = committedStorageDataTypeId;
+            }
+        } else
+        {
+            commitType = false;
+            if (checkIfExists)
+            {
+                baseWriter.checkEnumValues(storageDataTypeId, values, dataTypeNameOrNull);
+            }
+        }
+        if (commitType)
+        {
+            baseWriter.commitDataType(dataTypePathOrNull, storageDataTypeId);
+        }
+        return storageDataTypeId;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final HDF5EnumerationValue value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        value.getType().check(baseWriter.fileId);
+        final int storageDataTypeId = value.getType().getStorageTypeId();
+        final int nativeDataTypeId = value.getType().getNativeTypeId();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name,
+                                        storageDataTypeId, nativeDataTypeId,
+                                        dataSpaceId, value.toStorageForm(), registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, storageDataTypeId,
+                                        nativeDataTypeId, -1, value.toStorageForm(), registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
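+
+    // Usage sketch (hypothetical names): attach an enum attribute to an existing object.
+    //   enumWriter.setAttr("/plate1", "validity",
+    //           enumWriter.newVal("validity", new String[] { "VALID", "INVALID" }, "VALID"));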
+
+    @Override
+    public void setAttr(String objectPath, String name, Enum<?> value) throws HDF5JavaException
+    {
+        setAttr(objectPath, name, new HDF5EnumerationValue(getType(getEnumClass(value)), value));
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final HDF5EnumerationValueArray value)
+    {
+        baseWriter.setEnumArrayAttribute(objectPath, name, value);
+    }
+
+    @Override
+    public void setMDArrayAttr(String objectPath, String name, HDF5EnumerationValueMDArray value)
+    {
+        baseWriter.setEnumMDArrayAttribute(objectPath, name, value);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final HDF5EnumerationValue value)
+            throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        value.getType().check(baseWriter.fileId);
+        final int storageDataTypeId = value.getType().getStorageTypeId();
+        final int nativeDataTypeId = value.getType().getNativeTypeId();
+        baseWriter.writeScalar(objectPath, storageDataTypeId, nativeDataTypeId,
+                value.toStorageForm());
+    }
+
+    @Override
+    public void write(final String objectPath, final Enum<?> value) throws HDF5JavaException
+    {
+        write(objectPath, new HDF5EnumerationValue(getType(getEnumClass(value)), value));
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final HDF5EnumerationValueArray data)
+            throws HDF5JavaException
+    {
+        writeArray(objectPath, data, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final HDF5EnumerationValueArray data,
+            final HDF5IntStorageFeatures features) throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        data.getType().check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.isScaling())
+                    {
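+                        // Integer scaling stores the ordinals as scaled integers and
+                        // records the enum type name in a housekeeping attribute so the
+                        // type can be recovered on reading.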
+                        features.checkScalingOK(baseWriter.fileFormat);
+                        final HDF5IntStorageFeatures actualFeatures =
+                                HDF5IntStorageFeatures.createDeflateAndIntegerScaling(
+                                        features.getDeflateLevel(), data.getType().getEnumType()
+                                                .getNumberOfBits(),
+                                        baseWriter.keepDataIfExists(features));
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, data.getType()
+                                        .getIntStorageTypeId(), new long[]
+                                    { data.getLength() }, data.getStorageForm().getStorageSize(),
+                                        actualFeatures, registry);
+                        H5Dwrite(dataSetId, data.getType().getIntNativeTypeId(), H5S_ALL, H5S_ALL,
+                                H5P_DEFAULT, data.toStorageForm());
+                        baseWriter.setTypeVariant(dataSetId, HDF5DataTypeVariant.ENUM, registry);
+                        baseWriter.setStringAttribute(dataSetId, HDF5Utils
+                                .getEnumTypeNameAttributeName(baseWriter.houseKeepingNameSuffix),
+                                data.getType().getName(), data.getType().getName().length(), true,
+                                registry);
+                    } else
+                    {
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, data.getType()
+                                        .getStorageTypeId(), new long[]
+                                    { data.getLength() }, data.getStorageForm().getStorageSize(),
+                                        features, registry);
+                        H5Dwrite(dataSetId, data.getType().getNativeTypeId(), H5S_ALL, H5S_ALL,
+                                H5P_DEFAULT, data.toStorageForm());
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
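+
+    // Usage sketch (hypothetical names): write a small enum array without compression.
+    //   enumWriter.writeArray("/states",
+    //           enumWriter.newArray("state", new String[] { "ON", "OFF" },
+    //                   new String[] { "ON", "OFF", "ON" }));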
+
+    @Override
+    public HDF5EnumerationType createArray(final String objectPath,
+            final HDF5EnumerationType enumType, final int size)
+    {
+        return createArray(objectPath, enumType, size, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public HDF5EnumerationType createArray(final String objectPath,
+            final HDF5EnumerationType enumType, final long size, final int blockSize)
+    {
+        return createArray(objectPath, enumType, size, blockSize,
+                HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public HDF5EnumerationType createArray(final String objectPath,
+            final HDF5EnumerationType enumType, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        baseWriter.checkOpen();
+        enumType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.isScaling())
+                    {
+                        features.checkScalingOK(baseWriter.fileFormat);
+                        final HDF5IntStorageFeatures actualCompression =
+                                HDF5IntStorageFeatures.createDeflateAndIntegerScaling(features
+                                        .getDeflateLevel(), enumType.getEnumType()
+                                        .getNumberOfBits());
+                        final int dataSetId =
+                                baseWriter.createDataSet(objectPath,
+                                        enumType.getIntStorageTypeId(), actualCompression,
+                                        new long[]
+                                            { size }, new long[]
+                                            { blockSize }, enumType.getStorageForm()
+                                                .getStorageSize(), registry);
+                        baseWriter.setTypeVariant(dataSetId, HDF5DataTypeVariant.ENUM, registry);
+                        baseWriter.setStringAttribute(dataSetId, HDF5Utils
+                                .getEnumTypeNameAttributeName(baseWriter.houseKeepingNameSuffix),
+                                enumType.getName(), enumType.getName().length(), true, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, enumType.getStorageTypeId(), features,
+                                new long[]
+                                    { size }, new long[]
+                                    { blockSize }, enumType.getStorageForm().getStorageSize(),
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+        return enumType;
+    }
+
+    @Override
+    public HDF5EnumerationType createArray(final String objectPath,
+            final HDF5EnumerationType enumType, final long size,
+            final HDF5IntStorageFeatures features)
+    {
+        baseWriter.checkOpen();
+        enumType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        create(new long[]
+                            { 0 }, new long[]
+                            { size }, registry);
+                    } else
+                    {
+                        create(new long[]
+                            { size }, null, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+
+                private void create(final long[] dimensions, final long[] blockDimensionsOrNull,
+                        final ICleanUpRegistry registry)
+                {
+                    if (features.isScaling())
+                    {
+                        features.checkScalingOK(baseWriter.fileFormat);
+                        final HDF5IntStorageFeatures actualCompression =
+                                HDF5IntStorageFeatures.createDeflateAndIntegerScaling(features
+                                        .getDeflateLevel(), enumType.getEnumType()
+                                        .getNumberOfBits());
+                        final int dataSetId =
+                                baseWriter.createDataSet(objectPath,
+                                        enumType.getIntStorageTypeId(), actualCompression,
+                                        dimensions, blockDimensionsOrNull, enumType
+                                                .getStorageForm().getStorageSize(), registry);
+                        baseWriter.setTypeVariant(dataSetId, HDF5DataTypeVariant.ENUM, registry);
+                        baseWriter.setStringAttribute(dataSetId, HDF5Utils
+                                .getEnumTypeNameAttributeName(baseWriter.houseKeepingNameSuffix),
+                                enumType.getName(), enumType.getName().length(), true, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, enumType.getStorageTypeId(), features,
+                                dimensions, blockDimensionsOrNull, enumType.getStorageForm()
+                                        .getStorageSize(), registry);
+                    }
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+        return enumType;
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final HDF5EnumerationValueArray data,
+            final long blockNumber)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeArrayBlockWithOffset(objectPath, data, data.getLength(), data.getLength()
+                * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationValueArray data, final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        data.getType().check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
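+                    // Extend the data set to cover [0, offset + dataSize) and select the
+                    // corresponding hyperslab before writing the block.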
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    if (baseWriter.isScaledEnum(dataSetId, registry))
+                    {
+                        H5Dwrite(dataSetId, data.getType().getIntNativeTypeId(), memorySpaceId,
+                                dataSpaceId, H5P_DEFAULT, data.toStorageForm());
+                    } else
+                    {
+                        H5Dwrite(dataSetId, data.getType().getNativeTypeId(), memorySpaceId,
+                                dataSpaceId, H5P_DEFAULT, data.toStorageForm());
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final HDF5EnumerationValueMDArray data)
+            throws HDF5JavaException
+    {
+        writeMDArray(objectPath, data, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final HDF5EnumerationValueMDArray data,
+            final HDF5IntStorageFeatures features) throws HDF5JavaException
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        data.getType().check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.isScaling())
+                    {
+                        features.checkScalingOK(baseWriter.fileFormat);
+                        final HDF5IntStorageFeatures actualFeatures =
+                                HDF5IntStorageFeatures.createDeflateAndIntegerScaling(
+                                        features.getDeflateLevel(), data.getType().getEnumType()
+                                                .getNumberOfBits(),
+                                        baseWriter.keepDataIfExists(features));
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, data.getType()
+                                        .getIntStorageTypeId(), data.longDimensions(), data
+                                        .getStorageForm().getStorageSize(), actualFeatures,
+                                        registry);
+                        H5Dwrite(dataSetId, data.getType().getIntNativeTypeId(), H5S_ALL, H5S_ALL,
+                                H5P_DEFAULT, data.toStorageForm());
+                        baseWriter.setTypeVariant(dataSetId, HDF5DataTypeVariant.ENUM, registry);
+                        baseWriter.setStringAttribute(dataSetId, HDF5Utils
+                                .getEnumTypeNameAttributeName(baseWriter.houseKeepingNameSuffix),
+                                data.getType().getName(), data.getType().getName().length(), true,
+                                registry);
+                    } else
+                    {
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, data.getType()
+                                        .getStorageTypeId(), data.longDimensions(), data
+                                        .getStorageForm().getStorageSize(), features, registry);
+                        H5Dwrite(dataSetId, data.getType().getNativeTypeId(), H5S_ALL, H5S_ALL,
+                                H5P_DEFAULT, data.toStorageForm());
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    private void writeEnumMDArrayBlockWithOffset(final String objectPath,

+            final HDF5EnumerationType enumType, final byte[] data, final long[] dimensions,
+            final long[] offset, final long[] dataSetDimensions)
+    {
+        assert objectPath != null;
+        assert enumType != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        enumType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, enumType.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationValueMDArray data, final long[] offset)
+    {
+        final long[] dimensions = data.longDimensions();
+        final long[] dataSetDimensions = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            dataSetDimensions[i] = offset[i] + dimensions[i];
+        }
+        writeEnumMDArrayBlockWithOffset(objectPath, data.getType(), data.toStorageForm(),
+                dimensions, offset, dataSetDimensions);
+    }
+
+    @Override
+    public void writeMDArrayBlock(String objectPath, HDF5EnumerationValueMDArray data,
+            long[] blockNumber)
+    {
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        final long[] dataSetDimensions = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+            dataSetDimensions[i] = offset[i] + dimensions[i];
+        }
+        writeEnumMDArrayBlockWithOffset(objectPath, data.getType(), data.toStorageForm(),
+                dimensions, offset, dataSetDimensions);
+    }
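+
+    // Usage sketch (assumptions: "writer" is this enum writer and
+    // "/myEnumMDArray" was created with block dimensions { 2, 2 }): for block
+    // number { 1, 0 } the offset computed above is { 1 * 2, 0 * 2 } = { 2, 0 }.
+    //
+    //   final HDF5EnumerationValueMDArray block =
+    //           new HDF5EnumerationValueMDArray(enumType, new MDIntArray(new int[]
+    //               { 0, 1, 1, 0 }, new int[]
+    //               { 2, 2 }));
+    //   writer.writeMDArrayBlock("/myEnumMDArray", block, new long[]
+    //       { 1, 0 });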
+
+    @Override
+    public HDF5EnumerationType createMDArray(String objectPath, HDF5EnumerationType enumType,
+            int[] dimensions)
+    {
+        return createMDArray(objectPath, enumType, dimensions,
+                HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public HDF5EnumerationType createMDArray(String objectPath, HDF5EnumerationType enumType,
+            long[] dimensions, int[] blockDimensions)
+    {
+        return createMDArray(objectPath, enumType, dimensions, blockDimensions,
+                HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public HDF5EnumerationType createMDArray(final String objectPath,
+            final HDF5EnumerationType enumType, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert enumType != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        enumType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, enumType.getStorageTypeId(), features,
+                            dimensions, MDAbstractArray.toLong(blockDimensions), enumType
+                                    .getStorageForm().getStorageSize(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+        return enumType;
+    }
+
+    @Override
+    public HDF5EnumerationType createMDArray(final String objectPath,
+            final HDF5EnumerationType enumType, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert enumType != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        enumType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(final ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, enumType.getStorageTypeId(), features,
+                                nullDimensions, MDAbstractArray.toLong(dimensions), enumType
+                                        .getStorageForm().getStorageSize(), registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, enumType.getStorageTypeId(), features,
+                                MDAbstractArray.toLong(dimensions), null, enumType.getStorageForm()
+                                        .getStorageSize(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+        return enumType;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationType.java b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationType.java
new file mode 100644
index 0000000..493e4a0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationType.java
@@ -0,0 +1,234 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT16;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT32;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT8;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U16LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U32LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U8LE;
+
+import java.util.Iterator;
+import java.util.List;
+
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A class that represents an enumeration for a given HDF5 file and <var>values</var> array.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5EnumerationType extends HDF5DataType implements Iterable<String>
+{
+    /**
+     * The storage form (as size in bytes) of an enumeration type.
+     */
+    public enum EnumStorageForm
+    {
+        /**
+         * One byte, for up to 255 alternatives.
+         */
+        BYTE(1, H5T_NATIVE_INT8, H5T_STD_U8LE),
+        /**
+         * Two bytes, for up to 65535 alternatives.
+         */
+        SHORT(2, H5T_NATIVE_INT16, H5T_STD_U16LE),
+        /**
+         * Four bytes, for more than 65535 alternatives.
+         */
+        INT(4, H5T_NATIVE_INT32, H5T_STD_U32LE);
+
+        private final byte storageSize;
+
+        private final int intNativeType;
+
+        private final int intStorageType;
+
+        EnumStorageForm(int storageSize, int intNativeType, int intStorageType)
+        {
+            this.storageSize = (byte) storageSize;
+            this.intNativeType = intNativeType;
+            this.intStorageType = intStorageType;
+        }
+
+        /**
+         * Return the number of bytes (1, 2 or 4) of this storage form.
+         */
+        public byte getStorageSize()
+        {
+            return storageSize;
+        }
+
+        int getIntNativeTypeId()
+        {
+            return intNativeType;
+        }
+
+        int getIntStorageTypeId()
+        {
+            return intStorageType;
+        }
+    }
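+
+    // Worked example: a type with 3 alternatives is stored as BYTE (1 byte per
+    // element), one with 1000 alternatives as SHORT (2 bytes), and one with
+    // 100000 alternatives as INT (4 bytes).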
+
+    private final EnumerationType enumType;
+
+    /**
+     * Returns the storage data type id of the corresponding integer type of this type.
+     */
+    int getIntStorageTypeId()
+    {
+        return getStorageForm().getIntStorageTypeId();
+    }
+
+    /**
+     * Returns the native data type id of the corresponding integer type of this type.
+     */
+    int getIntNativeTypeId()
+    {
+        return getStorageForm().getIntNativeTypeId();
+    }
+
+    HDF5EnumerationType(int fileId, int storageTypeId, int nativeTypeId, String nameOrNull,
+            String[] values, HDF5BaseReader baseReader)
+    {
+        super(fileId, storageTypeId, nativeTypeId, baseReader);
+
+        assert values != null;
+
+        this.enumType = new EnumerationType(nameOrNull, values);
+    }
+
+    HDF5EnumerationType(int fileId, int storageTypeId, int nativeTypeId, EnumerationType enumType,
+            HDF5BaseReader baseReader)
+    {
+        super(fileId, storageTypeId, nativeTypeId, baseReader);
+
+        assert enumType != null;
+
+        this.enumType = enumType;
+    }
+
+    EnumerationType getEnumType()
+    {
+        return enumType;
+    }
+
+    /**
+     * Returns the ordinal value for the given string <var>value</var>, if <var>value</var> is a
+     * member of the enumeration, and <code>null</code> otherwise.
+     */
+    public Integer tryGetIndexForValue(String value)
+    {
+        return enumType.tryGetIndexForValue(value);
+    }
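+
+    // Usage sketch, assuming "enumType" is an instance of this class with the
+    // values { "RED", "GREEN", "BLUE" }:
+    //
+    //   enumType.tryGetIndexForValue("GREEN") // returns 1
+    //   enumType.tryGetIndexForValue("PINK")  // returns null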
+
+    /**
+     * Returns the name of this type, if it exists, and <code>null</code> otherwise.
+     */
+    @Override
+    public String tryGetName()
+    {
+        return enumType.tryGetName();
+    }
+
+    /**
+     * Returns the allowed values of this enumeration type.
+     */
+    public List<String> getValues()
+    {
+        return enumType.getValues();
+    }
+
+    /**
+     * Returns the (file-independent) {@link EnumerationType} of this (file-dependent)
+     * {@link HDF5EnumerationType}.
+     */
+    public EnumerationType getEnumerationType()
+    {
+        return enumType;
+    }
+
+    /**
+     * Returns the {@link EnumStorageForm} of this enumeration type.
+     */
+    public EnumStorageForm getStorageForm()
+    {
+        return enumType.getStorageForm();
+    }
+
+    HDF5EnumerationValue createFromStorageForm(byte[] data, int offset)
+    {
+        return new HDF5EnumerationValue(this, getOrdinalFromStorageForm(data, offset));
+    }
+
+    String createStringFromStorageForm(byte[] data, int offset)
+    {
+        return enumType.createStringFromStorageForm(data, offset);
+    }
+
+    int getOrdinalFromStorageForm(byte[] data, int offset)
+    {
+        switch (getStorageForm())
+        {
+            case BYTE:
+                return data[offset];
+            case SHORT:
+                return HDFNativeData.byteToShort(data, offset);
+            case INT:
+                return HDFNativeData.byteToInt(data, offset);
+        }
+        throw new Error("Illegal storage form (" + getStorageForm() + ".)");
+    }
+
+    //
+    // Iterable
+    //
+
+    /**
+     * Returns an {@link Iterator} over all values of this enumeration type.
+     * {@link Iterator#remove()} is not allowed and will throw an
+     * {@link UnsupportedOperationException}.
+     */
+    @Override
+    public Iterator<String> iterator()
+    {
+        return enumType.iterator();
+    }
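+
+    // Usage sketch, assuming "enumType" is an instance of this class:
+    //
+    //   for (String value : enumType)
+    //   {
+    //       System.out.println(value);
+    //   }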
+
+    @Override
+    public int hashCode()
+    {
+        return enumType.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final HDF5EnumerationType other = (HDF5EnumerationType) obj;
+        return enumType.equals(other.enumType);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValue.java b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValue.java
new file mode 100644
index 0000000..7f35c69
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValue.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+/**
+ * A class that represents an HDF enumeration value.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5EnumerationValue
+{
+    private final HDF5EnumerationType type;
+
+    private final int ordinal;
+
+    /**
+     * Creates an enumeration value.
+     * 
+     * @param type The enumeration type of this value.
+     * @param value The value in the <var>type</var>.
+     * @throws IllegalArgumentException If the <var>value</var> is not one of the values of
+     *             <var>type</var>.
+     */
+    public HDF5EnumerationValue(HDF5EnumerationType type, Enum<?> value)
+            throws IllegalArgumentException
+    {
+        this(type, value.name());
+    }
+
+    /**
+     * Creates an enumeration value.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinal The ordinal value of the value in the <var>type</var>.
+     * @throws IllegalArgumentException If the <var>ordinal</var> is outside of the range of allowed
+     *             values of the <var>type</var>.
+     */
+    public HDF5EnumerationValue(HDF5EnumerationType type, int ordinal)
+            throws IllegalArgumentException
+    {
+        assert type != null;
+
+        if (ordinal < 0 || ordinal >= type.getEnumType().getValueArray().length)
+        {
+            throw new IllegalArgumentException("valueIndex " + ordinal
+                    + " out of allowed range [0.." + (type.getEnumType().getValueArray().length - 1)
+                    + "] of type '" + type.getName() + "'.");
+        }
+        this.type = type;
+        this.ordinal = ordinal;
+    }
+
+    /**
+     * Creates an enumeration value.
+     * 
+     * @param type The enumeration type of this value.
+     * @param value The string value (needs to be one of the values of <var>type</var>).
+     * @throws IllegalArgumentException If the <var>value</var> is not one of the values of
+     *             <var>type</var>.
+     */
+    public HDF5EnumerationValue(HDF5EnumerationType type, String value)
+            throws IllegalArgumentException
+    {
+        assert type != null;
+        assert value != null;
+
+        final Integer valueIndexOrNull = type.tryGetIndexForValue(value);
+        if (valueIndexOrNull == null)
+        {
+            throw new IllegalArgumentException("Value '" + value + "' is not allowed for type '"
+                    + type.getName() + "'.");
+        }
+        this.type = type;
+        this.ordinal = valueIndexOrNull;
+    }
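+
+    // Construction sketch, assuming "enumType" has the values
+    // { "RED", "GREEN", "BLUE" }: both lines below denote the same value.
+    //
+    //   new HDF5EnumerationValue(enumType, "GREEN"); // by name
+    //   new HDF5EnumerationValue(enumType, 1);       // by ordinal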
+
+    /**
+     * Returns the <var>type</var> of this enumeration value.
+     */
+    public HDF5EnumerationType getType()
+    {
+        return type;
+    }
+
+    /**
+     * Returns the string value.
+     */
+    public String getValue()
+    {
+        return type.getEnumType().getValueArray()[ordinal];
+    }
+
+    /**
+     * Returns the ordinal value.
+     */
+    public int getOrdinal()
+    {
+        return ordinal;
+    }
+
+    /**
+     * Returns the value as Enum of type <var>enumClass</var>.
+     */
+    public <T extends Enum<T>> T getValue(Class<T> enumClass)
+    {
+        return Enum.valueOf(enumClass, getValue());
+    }
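+
+    // Usage sketch, assuming a Java enum whose constants match the values of
+    // the HDF5 type, e.g. 'enum Color { RED, GREEN, BLUE }':
+    //
+    //   final Color color = value.getValue(Color.class);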
+
+    /**
+     * Returns a description of this value.
+     */
+    public String getDescription()
+    {
+        return type.getName() + " [" + type.getEnumType().getValueArray()[ordinal] + "]";
+    }
+
+    byte[] toStorageForm()
+    {
+        return type.getEnumType().toStorageForm(ordinal);
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((type == null) ? 0 : type.hashCode());
+        result = prime * result + ordinal;
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        HDF5EnumerationValue other = (HDF5EnumerationValue) obj;
+        if (type == null)
+        {
+            if (other.type != null)
+            {
+                return false;
+            }
+        } else if (type.equals(other.type) == false)
+        {
+            return false;
+        }
+        if (ordinal != other.ordinal)
+        {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public String toString()
+    {
+        return getValue();
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValueArray.java b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValueArray.java
new file mode 100644
index 0000000..c997440
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValueArray.java
@@ -0,0 +1,625 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Array;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+import ch.systemsx.cisd.hdf5.HDF5EnumerationType.EnumStorageForm;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A class that represents an array of HDF enumeration values.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5EnumerationValueArray implements Iterable<String>
+{
+
+    private final HDF5EnumerationType type;
+
+    private final int length;
+
+    private EnumStorageForm storageForm;
+
+    private byte[] bArrayOrNull;
+
+    private short[] sArrayOrNull;
+
+    private int[] iArrayOrNull;
+
+    HDF5EnumerationValueArray(HDF5EnumerationType type, Object array)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        if (array instanceof byte[])
+        {
+            final byte[] bArray = (byte[]) array;
+            this.length = bArray.length;
+            setOrdinalArray(bArray);
+        } else if (array instanceof short[])
+        {
+            final short[] sArray = (short[]) array;
+            this.length = sArray.length;
+            setOrdinalArray(sArray);
+        } else if (array instanceof int[])
+        {
+            final int[] iArray = (int[]) array;
+            this.length = iArray.length;
+            setOrdinalArray(iArray);
+        } else
+        {
+            throw new IllegalArgumentException("array is of illegal type "
+                    + array.getClass().getCanonicalName());
+        }
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueArray(HDF5EnumerationType type, byte[] ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        this.length = ordinalArray.length;
+        setOrdinalArray(ordinalArray);
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueArray(HDF5EnumerationType type, short[] ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        this.length = ordinalArray.length;
+        setOrdinalArray(ordinalArray);
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueArray(HDF5EnumerationType type, int[] ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        this.length = ordinalArray.length;
+        setOrdinalArray(ordinalArray);
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param valueArray The array of enum values (each one needs to be one of the values of
+     *            <var>type</var>).
+     * @throws IllegalArgumentException If any of the values in the <var>valueArray</var> is not one
+     *             of the values of <var>type</var>.
+     */
+    public HDF5EnumerationValueArray(HDF5EnumerationType type, Enum<?>[] valueArray)
+            throws IllegalArgumentException
+    {
+        this(type, toString(valueArray));
+    }
+
+    private static String[] toString(Enum<?>[] valueArray)
+    {
+        final String[] result = new String[valueArray.length];
+        for (int i = 0; i < valueArray.length; ++i)
+        {
+            result[i] = valueArray[i].name();
+        }
+        return result;
+    }
+    
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param valueArray The array of string values (each one needs to be one of the values of
+     *            <var>type</var>).
+     * @throws IllegalArgumentException If any of the values in the <var>valueArray</var> is not one
+     *             of the values of <var>type</var>.
+     */
+    public HDF5EnumerationValueArray(HDF5EnumerationType type, String[] valueArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        this.length = valueArray.length;
+        map(valueArray);
+    }
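+
+    // Construction sketch, assuming "enumType" has the values
+    // { "RED", "GREEN", "BLUE" }:
+    //
+    //   final HDF5EnumerationValueArray array =
+    //           new HDF5EnumerationValueArray(enumType, new String[]
+    //               { "RED", "RED", "BLUE" });
+    //   array.getLength();   // 3
+    //   array.getValue(2);   // "BLUE"
+    //   array.getOrdinal(2); // 2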
+
+    private void map(String[] array) throws IllegalArgumentException
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = new byte[array.length];
+            for (int i = 0; i < array.length; ++i)
+            {
+                final Integer indexOrNull = type.tryGetIndexForValue(array[i]);
+                if (indexOrNull == null)
+                {
+                    throw new IllegalArgumentException("Value '" + array[i]
+                            + "' is not allowed for type '" + type.getName() + "'.");
+                }
+                bArrayOrNull[i] = indexOrNull.byteValue();
+            }
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = new short[array.length];
+            for (int i = 0; i < array.length; ++i)
+            {
+                final Integer indexOrNull = type.tryGetIndexForValue(array[i]);
+                if (indexOrNull == null)
+                {
+                    throw new IllegalArgumentException("Value '" + array[i]
+                            + "' is not allowed for type '" + type.getName() + "'.");
+                }
+                sArrayOrNull[i] = indexOrNull.shortValue();
+            }
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = new int[array.length];
+            for (int i = 0; i < array.length; ++i)
+            {
+                final Integer indexOrNull = type.tryGetIndexForValue(array[i]);
+                if (indexOrNull == null)
+                {
+                    throw new IllegalArgumentException("Value '" + array[i]
+                            + "' is not allowed for type '" + type.getName() + "'.");
+                }
+                iArrayOrNull[i] = indexOrNull.intValue();
+            }
+        }
+    }
+
+    private void setOrdinalArray(byte[] array)
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = array;
+            checkOrdinalArray(bArrayOrNull);
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = toShortArray(array);
+            checkOrdinalArray(sArrayOrNull);
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = toIntArray(array);
+            checkOrdinalArray(iArrayOrNull);
+        }
+    }
+
+    private void setOrdinalArray(short[] array) throws IllegalArgumentException
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = toByteArray(array);
+            checkOrdinalArray(bArrayOrNull);
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = array;
+            checkOrdinalArray(sArrayOrNull);
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = toIntArray(array);
+            checkOrdinalArray(iArrayOrNull);
+        }
+    }
+
+    private void setOrdinalArray(int[] array) throws IllegalArgumentException
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = toByteArray(array);
+            checkOrdinalArray(bArrayOrNull);
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = toShortArray(array);
+            checkOrdinalArray(sArrayOrNull);
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = array;
+            checkOrdinalArray(iArrayOrNull);
+        }
+    }
+
+    private byte[] toByteArray(short[] array) throws IllegalArgumentException
+    {
+        final byte[] bArray = new byte[array.length];
+        for (int i = 0; i < array.length; ++i)
+        {
+            bArray[i] = (byte) array[i];
+            if (bArray[i] != array[i])
+            {
+                throw new IllegalArgumentException("Value " + array[i]
+                        + " cannot be stored in byte array");
+            }
+        }
+        return bArray;
+    }
+
+    private byte[] toByteArray(int[] array) throws IllegalArgumentException
+    {
+        final byte[] bArray = new byte[array.length];
+        for (int i = 0; i < array.length; ++i)
+        {
+            bArray[i] = (byte) array[i];
+            if (bArray[i] != array[i])
+            {
+                throw new IllegalArgumentException("Value " + array[i]
+                        + " cannot be stored in byte array");
+            }
+        }
+        return bArray;
+    }
+
+    private short[] toShortArray(byte[] array)
+    {
+        final short[] sArray = new short[array.length];
+        for (int i = 0; i < array.length; ++i)
+        {
+            sArray[i] = array[i];
+        }
+        return sArray;
+    }
+
+    private short[] toShortArray(int[] array) throws IllegalArgumentException
+    {
+        final short[] sArray = new short[array.length];
+        for (int i = 0; i < array.length; ++i)
+        {
+            sArray[i] = (short) array[i];
+            if (sArray[i] != array[i])
+            {
+                throw new IllegalArgumentException("Value " + array[i]
+                        + " cannot be stored in short array");
+            }
+        }
+        return sArray;
+    }
+
+    private int[] toIntArray(byte[] array)
+    {
+        final int[] iArray = new int[array.length];
+        for (int i = 0; i < array.length; ++i)
+        {
+            iArray[i] = array[i];
+        }
+        return iArray;
+    }
+
+    private int[] toIntArray(short[] array)
+    {
+        final int[] iArray = new int[array.length];
+        for (int i = 0; i < array.length; ++i)
+        {
+            iArray[i] = array[i];
+        }
+        return iArray;
+    }
+
+    private void checkOrdinalArray(byte[] array) throws IllegalArgumentException
+    {
+        for (int i = 0; i < array.length; ++i)
+        {
+            if (array[i] < 0 || array[i] >= type.getEnumType().getValueArray().length)
+            {
+                throw new IllegalArgumentException("valueIndex " + array[i]
+                        + " out of allowed range [0.." + (type.getEnumType().getValueArray().length - 1)
+                        + "] of type '" + type.getName() + "'.");
+            }
+        }
+    }
+
+    private void checkOrdinalArray(short[] array) throws IllegalArgumentException
+    {
+        for (int i = 0; i < array.length; ++i)
+        {
+            if (array[i] < 0 || array[i] >= type.getEnumType().getValueArray().length)
+            {
+                throw new IllegalArgumentException("valueIndex " + array[i]
+                        + " out of allowed range [0.." + (type.getEnumType().getValueArray().length - 1)
+                        + "] of type '" + type.getName() + "'.");
+            }
+        }
+    }
+
+    private void checkOrdinalArray(int[] array) throws IllegalArgumentException
+    {
+        for (int i = 0; i < array.length; ++i)
+        {
+            if (array[i] < 0 || array[i] >= type.getEnumType().getValueArray().length)
+            {
+                throw new IllegalArgumentException("valueIndex " + array[i]
+                        + " out of allowed range [0.." + (type.getEnumType().getValueArray().length - 1)
+                        + "] of type '" + type.getName() + "'.");
+            }
+        }
+    }
+
+    EnumStorageForm getStorageForm()
+    {
+        return storageForm;
+    }
+
+    byte[] getStorageFormBArray()
+    {
+        return bArrayOrNull;
+    }
+
+    short[] getStorageFormSArray()
+    {
+        return sArrayOrNull;
+    }
+
+    int[] getStorageFormIArray()
+    {
+        return iArrayOrNull;
+    }
+
+    /**
+     * Returns the <var>type</var> of this enumeration array.
+     */
+    public HDF5EnumerationType getType()
+    {
+        return type;
+    }
+
+    /**
+     * Returns the number of members of this enumeration array.
+     */
+    public int getLength()
+    {
+        return length;
+    }
+
+    /**
+     * Returns the ordinal value for the <var>arrayIndex</var>.
+     * 
+     * @param arrayIndex The index in the array to get the ordinal for.
+     */
+    public int getOrdinal(int arrayIndex)
+    {
+        if (bArrayOrNull != null)
+        {
+            return bArrayOrNull[arrayIndex];
+        } else if (sArrayOrNull != null)
+        {
+            return sArrayOrNull[arrayIndex];
+        } else
+        {
+            return iArrayOrNull[arrayIndex];
+        }
+    }
+
+    /**
+     * Returns the string value for <var>arrayIndex</var>.
+     * 
+     * @param arrayIndex The index in the array to get the value for.
+     */
+    public String getValue(int arrayIndex)
+    {
+        return type.getValues().get(getOrdinal(arrayIndex));
+    }
+
+    /**
+     * Returns the value as Enum of type <var>enumClass</var>.
+     * 
+     * @param enumClass The class to return the value as.
+     * @param arrayIndex The index in the array to get the value for.
+     */
+    public <T extends Enum<T>> T getValue(Class<T> enumClass, int arrayIndex)
+    {
+        return Enum.valueOf(enumClass, getValue(arrayIndex));
+    }
+
+    /**
+     * Returns the string values for all elements of this array.
+     */
+    public String[] toStringArray()
+    {
+        final int len = getLength();
+        final String[] values = new String[len];
+        for (int i = 0; i < len; ++i)
+        {
+            values[i] = getValue(i);
+        }
+        return values;
+    }
+
+    /**
+     * Returns the values for all elements of this array as Enums of type <var>enumClass</var>.
+     */
+    public <T extends Enum<T>> T[] toEnumArray(Class<T> enumClass)
+    {
+        final int len = getLength();
+        @SuppressWarnings("unchecked")
+        final T[] result = (T[]) Array.newInstance(enumClass, len);
+        for (int i = 0; i < len; ++i)
+        {
+            try
+            {
+                result[i] = Enum.valueOf(enumClass, getValue(i));
+            } catch (IllegalArgumentException ex)
+            {
+                throw new HDF5JavaException("The Java enum class " + enumClass.getCanonicalName()
+                        + " has no value '" + getValue(i) + "'.");
+            }
+        }
+        return result;
+    }
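+
+    // Usage sketch, assuming a matching Java enum
+    // 'enum Color { RED, GREEN, BLUE }'; throws HDF5JavaException if the Java
+    // enum lacks one of the stored values.
+    //
+    //   final Color[] colors = array.toEnumArray(Color.class);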
+
+    byte[] toStorageForm()
+    {
+        switch (getStorageForm())
+        {
+            case BYTE:
+                return getStorageFormBArray();
+            case SHORT:
+                return NativeData.shortToByte(getStorageFormSArray(), ByteOrder.NATIVE);
+            case INT:
+                return NativeData.intToByte(getStorageFormIArray(), ByteOrder.NATIVE);
+        }
+        throw new Error("Illegal storage form (" + getStorageForm() + ".)");
+    }
+
+    static HDF5EnumerationValueArray fromStorageForm(HDF5EnumerationType enumType, byte[] data,
+            int offset, int len)
+    {
+        switch (enumType.getStorageForm())
+        {
+            case BYTE:
+                final byte[] subArray = new byte[len];
+                System.arraycopy(data, offset, subArray, 0, len);
+                return new HDF5EnumerationValueArray(enumType, subArray);
+            case SHORT:
+                return new HDF5EnumerationValueArray(enumType, HDFNativeData.byteToShort(data,
+                        offset, len));
+            case INT:
+                return new HDF5EnumerationValueArray(enumType, HDFNativeData.byteToInt(data,
+                        offset, len));
+        }
+        throw new Error("Illegal storage form (" + enumType.getStorageForm() + ".)");
+    }
+
+    static String[] fromStorageFormToStringArray(HDF5EnumerationType enumType, byte[] data,
+            int offset, int len)
+    {
+        final String[] valueArray = new String[len];
+        for (int i = 0; i < len; ++i)
+        {
+            valueArray[i] = enumType.createStringFromStorageForm(data, offset + i);
+        }
+        return valueArray;
+    }
+
+    static int[] fromStorageFormToIntArray(HDF5EnumerationType enumType, byte[] data,
+            int offset, int len)
+    {
+        final int[] valueArray = new int[len];
+        for (int i = 0; i < len; ++i)
+        {
+            valueArray[i] = enumType.getOrdinalFromStorageForm(data, offset + i);
+        }
+        return valueArray;
+    }
+
+    //
+    // Iterable
+    //
+
+    @Override
+    public Iterator<String> iterator()
+    {
+        return new Iterator<String>()
+            {
+                private int index = 0;
+
+                @Override
+                public boolean hasNext()
+                {
+                    return index < length;
+                }
+
+                @Override
+                public String next()
+                {
+                    return getValue(index++);
+                }
+
+                @Override
+                public void remove() throws UnsupportedOperationException
+                {
+                    throw new UnsupportedOperationException();
+                }
+
+            };
+    }
+
+    @Override
+    public String toString()
+    {
+        final StringBuilder b = new StringBuilder();
+        b.append(type.getName());
+        b.append(" [");
+        for (String value : this)
+        {
+            b.append(value);
+            b.append(",");
+        }
+        if (length > 0)
+        {
+            b.setLength(b.length() - 1);
+        }
+        b.append("]");
+        return b.toString();
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValueMDArray.java b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValueMDArray.java
new file mode 100644
index 0000000..d039865
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5EnumerationValueMDArray.java
@@ -0,0 +1,766 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Iterator;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5EnumerationType.EnumStorageForm;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A class that represents a multi-dimensional array of HDF enumeration values.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5EnumerationValueMDArray implements Iterable<MDArray<String>.ArrayEntry>
+{
+    private final HDF5EnumerationType type;
+
+    private EnumStorageForm storageForm;
+
+    private MDByteArray bArrayOrNull;
+
+    private MDShortArray sArrayOrNull;
+
+    private MDIntArray iArrayOrNull;
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>. Has to be one of
+     *            {@link MDByteArray}, {@link MDShortArray}, {@link MDIntArray}, or an
+     *            {@link MDArray} of {@link String}s or Java <code>Enum</code>s.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueMDArray(HDF5EnumerationType type, MDAbstractArray<?> ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        if (ordinalArray instanceof MDByteArray)
+        {
+            final MDByteArray bArray = (MDByteArray) ordinalArray;
+            setOrdinalArray(bArray);
+        } else if (ordinalArray instanceof MDShortArray)
+        {
+            final MDShortArray sArray = (MDShortArray) ordinalArray;
+            setOrdinalArray(sArray);
+        } else if (ordinalArray instanceof MDIntArray)
+        {
+            final MDIntArray iArray = (MDIntArray) ordinalArray;
+            setOrdinalArray(iArray);
+        } else if (ordinalArray instanceof MDArray)
+        {
+            final MDArray<?> concreteArray = (MDArray<?>) ordinalArray;
+            if (concreteArray.getAsFlatArray().getClass().getComponentType() == String.class)
+            {
+                @SuppressWarnings("unchecked")
+                final MDArray<String> sArray = (MDArray<String>) concreteArray;
+                map(sArray);
+            } else if (concreteArray.getAsFlatArray().getClass().getComponentType().isEnum())
+            {
+                @SuppressWarnings("unchecked")
+                final MDArray<Enum<?>> eArray = (MDArray<Enum<?>>) concreteArray;
+                map(toString(eArray));
+            } else
+            {
+                throw new IllegalArgumentException("array has illegal component type "
+                        + concreteArray.getAsFlatArray().getClass().getComponentType()
+                                .getCanonicalName());
+            }
+        } else
+        {
+            throw new IllegalArgumentException("array is of illegal type "
+                    + ordinalArray.getClass().getCanonicalName());
+        }
+    }
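+
+    // Construction sketch, assuming "enumType" has the values
+    // { "RED", "GREEN", "BLUE" }: a 2x2 array given as flat ordinals in
+    // row-major order.
+    //
+    //   final HDF5EnumerationValueMDArray array =
+    //           new HDF5EnumerationValueMDArray(enumType, new MDIntArray(new int[]
+    //               { 0, 1, 2, 0 }, new int[]
+    //               { 2, 2 }));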
+
+    /**
+     * Creates an empty array with given <var>rank</var>.
+     */
+    public HDF5EnumerationValueMDArray(int rank)
+    {
+        this.type = null;
+        storageForm = EnumStorageForm.BYTE;
+        this.bArrayOrNull = new MDByteArray(new int[rank]);
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueMDArray(HDF5EnumerationType type, MDByteArray ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        setOrdinalArray(ordinalArray);
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueMDArray(HDF5EnumerationType type, MDShortArray ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        setOrdinalArray(ordinalArray);
+    }
+
+    /**
+     * Creates an enumeration value array.
+     * 
+     * @param type The enumeration type of this value.
+     * @param ordinalArray The array of ordinal values in the <var>type</var>.
+     * @throws IllegalArgumentException If any of the ordinals in the <var>ordinalArray</var> is
+     *             outside of the range of allowed values of the <var>type</var>.
+     */
+    public HDF5EnumerationValueMDArray(HDF5EnumerationType type, MDIntArray ordinalArray)
+            throws IllegalArgumentException
+    {
+        this.type = type;
+        setOrdinalArray(ordinalArray);
+    }
+
+    private static MDArray<String> toString(MDArray<Enum<?>> valueArray)
+    {
+        final Enum<?>[] flatEnumArray = valueArray.getAsFlatArray();
+        final MDArray<String> result = new MDArray<String>(String.class, valueArray.dimensions());
+        final String[] flatStringArray = result.getAsFlatArray();
+        for (int i = 0; i < flatEnumArray.length; ++i)
+        {
+            flatStringArray[i] = flatEnumArray[i].name();
+        }
+        return result;
+    }
+
+    private void map(MDArray<String> array) throws IllegalArgumentException
+    {
+        final String[] flatArray = array.getAsFlatArray();
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = new MDByteArray(array.dimensions());
+            final byte[] flatBArray = bArrayOrNull.getAsFlatArray();
+            for (int i = 0; i < flatArray.length; ++i)
+            {
+                final Integer indexOrNull = type.tryGetIndexForValue(flatArray[i]);
+                if (indexOrNull == null)
+                {
+                    throw new IllegalArgumentException("Value '" + flatArray[i]
+                            + "' is not allowed for type '" + type.getName() + "'.");
+                }
+                flatBArray[i] = indexOrNull.byteValue();
+            }
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = new MDShortArray(array.dimensions());
+            final short[] flatSArray = sArrayOrNull.getAsFlatArray();
+            for (int i = 0; i < flatArray.length; ++i)
+            {
+                final Integer indexOrNull = type.tryGetIndexForValue(flatArray[i]);
+                if (indexOrNull == null)
+                {
+                    throw new IllegalArgumentException("Value '" + flatArray[i]
+                            + "' is not allowed for type '" + type.getName() + "'.");
+                }
+                flatSArray[i] = indexOrNull.shortValue();
+            }
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = new MDIntArray(array.dimensions());
+            final int[] flatIArray = iArrayOrNull.getAsFlatArray();
+            for (int i = 0; i < flatIArray.length; ++i)
+            {
+                final Integer indexOrNull = type.tryGetIndexForValue(flatArray[i]);
+                if (indexOrNull == null)
+                {
+                    throw new IllegalArgumentException("Value '" + flatArray[i]
+                            + "' is not allowed for type '" + type.getName() + "'.");
+                }
+                flatIArray[i] = indexOrNull.intValue();
+            }
+        }
+    }
+
+    private void setOrdinalArray(MDByteArray array)
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = array;
+            checkOrdinalArray(bArrayOrNull);
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = toShortArray(array);
+            checkOrdinalArray(sArrayOrNull);
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = toIntArray(array);
+            checkOrdinalArray(iArrayOrNull);
+        }
+    }
+
+    private void setOrdinalArray(MDShortArray array) throws IllegalArgumentException
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = toByteArray(array);
+            checkOrdinalArray(bArrayOrNull);
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = array;
+            checkOrdinalArray(sArrayOrNull);
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = toIntArray(array);
+            checkOrdinalArray(iArrayOrNull);
+        }
+    }
+
+    private void setOrdinalArray(MDIntArray array) throws IllegalArgumentException
+    {
+        if (type.getEnumType().getValueArray().length < Byte.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.BYTE;
+            bArrayOrNull = toByteArray(array);
+            checkOrdinalArray(bArrayOrNull);
+            sArrayOrNull = null;
+            iArrayOrNull = null;
+        } else if (type.getEnumType().getValueArray().length < Short.MAX_VALUE)
+        {
+            storageForm = EnumStorageForm.SHORT;
+            bArrayOrNull = null;
+            sArrayOrNull = toShortArray(array);
+            checkOrdinalArray(sArrayOrNull);
+            iArrayOrNull = null;
+        } else
+        {
+            storageForm = EnumStorageForm.INT;
+            bArrayOrNull = null;
+            sArrayOrNull = null;
+            iArrayOrNull = array;
+            checkOrdinalArray(iArrayOrNull);
+        }
+    }
+
+    private MDByteArray toByteArray(MDShortArray array) throws IllegalArgumentException
+    {
+        final short[] flatSourceArray = array.getAsFlatArray();
+        final MDByteArray bArray = new MDByteArray(array.dimensions());
+        final byte[] flatTargetArray = bArray.getAsFlatArray();
+        for (int i = 0; i < flatSourceArray.length; ++i)
+        {
+            flatTargetArray[i] = (byte) flatSourceArray[i];
+            if (flatTargetArray[i] != flatSourceArray[i])
+            {
+                throw new IllegalArgumentException("Value " + flatSourceArray[i]
+                        + " cannot be stored in byte array");
+            }
+        }
+        return bArray;
+    }
+
+    private MDByteArray toByteArray(MDIntArray array) throws IllegalArgumentException
+    {
+        final int[] flatSourceArray = array.getAsFlatArray();
+        final MDByteArray bArray = new MDByteArray(array.dimensions());
+        final byte[] flatTargetArray = bArray.getAsFlatArray();
+        for (int i = 0; i < flatSourceArray.length; ++i)
+        {
+            flatTargetArray[i] = (byte) flatSourceArray[i];
+            if (flatTargetArray[i] != flatSourceArray[i])
+            {
+                throw new IllegalArgumentException("Value " + flatSourceArray[i]
+                        + " cannot be stored in byte array");
+            }
+        }
+        return bArray;
+    }
+
+    private MDShortArray toShortArray(MDByteArray array)
+    {
+        final byte[] flatSourceArray = array.getAsFlatArray();
+        final MDShortArray sArray = new MDShortArray(array.dimensions());
+        final short[] flatTargetArray = sArray.getAsFlatArray();
+        for (int i = 0; i < flatSourceArray.length; ++i)
+        {
+            flatTargetArray[i] = flatSourceArray[i];
+        }
+        return sArray;
+    }
+
+    private MDShortArray toShortArray(MDIntArray array) throws IllegalArgumentException
+    {
+        final int[] flatSourceArray = array.getAsFlatArray();
+        final MDShortArray sArray = new MDShortArray(array.dimensions());
+        final short[] flatTargetArray = sArray.getAsFlatArray();
+        for (int i = 0; i < flatSourceArray.length; ++i)
+        {
+            flatTargetArray[i] = (short) flatSourceArray[i];
+            if (flatSourceArray[i] != flatTargetArray[i])
+            {
+                throw new IllegalArgumentException("Value " + flatSourceArray[i]
+                        + " cannot be stored in short array");
+            }
+        }
+        return sArray;
+    }
+
+    private MDIntArray toIntArray(MDByteArray array)
+    {
+        final byte[] flatSourceArray = array.getAsFlatArray();
+        final MDIntArray iArray = new MDIntArray(array.dimensions());
+        final int[] flatTargetArray = iArray.getAsFlatArray();
+        for (int i = 0; i < flatSourceArray.length; ++i)
+        {
+            flatTargetArray[i] = flatSourceArray[i];
+        }
+        return iArray;
+    }
+
+    private MDIntArray toIntArray(MDShortArray array)
+    {
+        final short[] flatSourceArray = array.getAsFlatArray();
+        final MDIntArray iArray = new MDIntArray(array.dimensions());
+        final int[] flatTargetArray = iArray.getAsFlatArray();
+        for (int i = 0; i < flatSourceArray.length; ++i)
+        {
+            flatTargetArray[i] = flatSourceArray[i];
+        }
+        return iArray;
+    }
+
+    private void checkOrdinalArray(MDByteArray array) throws IllegalArgumentException
+    {
+        final byte[] flatArray = array.getAsFlatArray();
+        for (int i = 0; i < flatArray.length; ++i)
+        {
+            if (flatArray[i] < 0 || flatArray[i] >= type.getEnumType().getValueArray().length)
+            {
+                throw new IllegalArgumentException("valueIndex " + flatArray[i]
+                        + " out of allowed range [0.."
+                        + (type.getEnumType().getValueArray().length - 1) + "] of type '"
+                        + type.getName() + "'.");
+            }
+        }
+    }
+
+    private void checkOrdinalArray(MDShortArray array) throws IllegalArgumentException
+    {
+        final short[] flatArray = array.getAsFlatArray();
+        for (int i = 0; i < flatArray.length; ++i)
+        {
+            if (flatArray[i] < 0 || flatArray[i] >= type.getEnumType().getValueArray().length)
+            {
+                throw new IllegalArgumentException("valueIndex " + flatArray[i]
+                        + " out of allowed range [0.."
+                        + (type.getEnumType().getValueArray().length - 1) + "] of type '"
+                        + type.getName() + "'.");
+            }
+        }
+    }
+
+    private void checkOrdinalArray(MDIntArray array) throws IllegalArgumentException
+    {
+        final int[] flatArray = array.getAsFlatArray();
+        for (int i = 0; i < flatArray.length; ++i)
+        {
+            if (flatArray[i] < 0 || flatArray[i] >= type.getEnumType().getValueArray().length)
+            {
+                throw new IllegalArgumentException("valueIndex " + flatArray[i]
+                        + " out of allowed range [0.."
+                        + (type.getEnumType().getValueArray().length - 1) + "] of type '"
+                        + type.getName() + "'.");
+            }
+        }
+    }
+
+    EnumStorageForm getStorageForm()
+    {
+        return storageForm;
+    }
+
+    /**
+     * Returns the <var>type</var> of this enumeration array.
+     */
+    public HDF5EnumerationType getType()
+    {
+        return type;
+    }
+
+    /**
+     * Returns the number of elements of this enumeration array.
+     */
+    public int size()
+    {
+        return getOrdinalValues().size();
+    }
+
+    /**
+     * Returns the extent of this enum array along its <var>dim</var>-th axis.
+     */
+    public int size(int dim)
+    {
+        assert dim < dimensions().length;
+
+        return dimensions()[dim];
+    }
+
+    /**
+     * Returns the rank of this multi-dimensional enumeration array.
+     */
+    public int rank()
+    {
+        return dimensions().length;
+    }
+
+    /**
+     * Returns the dimensions of this enumeration array.
+     */
+    public int[] dimensions()
+    {
+        return getOrdinalValues().dimensions();
+    }
+
+    /**
+     * Returns the dimensions of this enumeration array as a <code>long[]</code>.
+     */
+    public long[] longDimensions()
+    {
+        return getOrdinalValues().longDimensions();
+    }
+
+    /**
+     * Returns the ordinal value for the <var>arrayIndex</var>.
+     * 
+     * @param arrayIndex The index in the array to get the ordinal for.
+     */
+    public int getOrdinal(int arrayIndex)
+    {
+        if (bArrayOrNull != null)
+        {
+            return bArrayOrNull.get(arrayIndex);
+        } else if (sArrayOrNull != null)
+        {
+            return sArrayOrNull.get(arrayIndex);
+        } else
+        {
+            return iArrayOrNull.get(arrayIndex);
+        }
+    }
+
+    /**
+     * Returns the ordinal value for the given <var>arrayIndexX</var> and <var>arrayIndexY</var>.
+     * 
+     * @param arrayIndexX The x index in the array to get the ordinal for.
+     * @param arrayIndexY The y index in the array to get the ordinal for.
+     */
+    public int getOrdinal(int arrayIndexX, int arrayIndexY)
+    {
+        if (bArrayOrNull != null)
+        {
+            return bArrayOrNull.get(arrayIndexX, arrayIndexY);
+        } else if (sArrayOrNull != null)
+        {
+            return sArrayOrNull.get(arrayIndexX, arrayIndexY);
+        } else
+        {
+            return iArrayOrNull.get(arrayIndexX, arrayIndexY);
+        }
+    }
+
+    /**
+     * Returns the ordinal value for the given <var>arrayIndices</var>.
+     * 
+     * @param arrayIndices The indices in the array to get the ordinal for.
+     */
+    public int getOrdinal(int... arrayIndices)
+    {
+        if (bArrayOrNull != null)
+        {
+            return bArrayOrNull.get(arrayIndices);
+        } else if (sArrayOrNull != null)
+        {
+            return sArrayOrNull.get(arrayIndices);
+        } else
+        {
+            return iArrayOrNull.get(arrayIndices);
+        }
+    }
+
+    /**
+     * Returns the string value for <var>arrayIndex</var>.
+     * 
+     * @param arrayIndex The index in the array to get the value for.
+     */
+    public String getValue(int arrayIndex)
+    {
+        return type.getValues().get(getOrdinal(arrayIndex));
+    }
+
+    /**
+     * Returns the string value for the given <var>arrayIndexX</var> and <var>arrayIndexY</var>.
+     * 
+     * @param arrayIndexX The x index in the array to get the value for.
+     * @param arrayIndexY The y index in the array to get the value for.
+     */
+    public String getValue(int arrayIndexX, int arrayIndexY)
+    {
+        return type.getValues().get(getOrdinal(arrayIndexX, arrayIndexY));
+    }
+
+    /**
+     * Returns the string value for the given <var>arrayIndices</var>.
+     * 
+     * @param arrayIndices The indices in the array to get the value for.
+     */
+    public String getValue(int... arrayIndices)
+    {
+        return type.getValues().get(getOrdinal(arrayIndices));
+    }
+
+    /**
+     * Returns the value as Enum of type <var>enumClass</var>.
+     * 
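+     * <p>
+     * A minimal sketch, where <code>enumArray</code> is an instance of this class and
+     * <code>Color</code> is a hypothetical Java enum whose constants match this array's string
+     * values:
+     * 
+     * <pre>
+     * Color c = enumArray.getValue(Color.class, 0, 0);
+     * </pre>
+     * 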
+     * @param enumClass The class to return the value as.
+     * @param arrayIndices The indices in the array to get the value for.
+     */
+    public <T extends Enum<T>> T getValue(Class<T> enumClass, int... arrayIndices)
+    {
+        return Enum.valueOf(enumClass, getValue(arrayIndices));
+    }
+
+    /**
+     * Returns the string values for all elements of this array.
+     */
+    public MDArray<String> toStringArray()
+    {
+        final int len = size();
+        final MDArray<String> values = new MDArray<String>(String.class, dimensions());
+        final String[] flatValues = values.getAsFlatArray();
+        for (int i = 0; i < len; ++i)
+        {
+            flatValues[i] = getValue(i);
+        }
+        return values;
+    }
+
+    /**
+     * Returns the values for all elements of this array as an enum array with enums of type
+     * <var>enumClass</var>.
+     */
+    public <T extends Enum<T>> MDArray<T> toEnumArray(Class<T> enumClass)
+    {
+        final int len = size();
+        final MDArray<T> values = new MDArray<T>(enumClass, dimensions());
+        final T[] flatValues = values.getAsFlatArray();
+        for (int i = 0; i < len; ++i)
+        {
+            flatValues[i] = Enum.valueOf(enumClass, getValue(i));
+        }
+        return values;
+    }
+
+    /**
+     * Returns the ordinal values for all elements of this array.
+     */
+    public MDAbstractArray<?> getOrdinalValues()
+    {
+        switch (getStorageForm())
+        {
+            case BYTE:
+                return bArrayOrNull;
+            case SHORT:
+                return sArrayOrNull;
+            case INT:
+                return iArrayOrNull;
+        }
+        throw new Error("Illegal storage form.");
+    }
+
+    byte[] toStorageForm()
+    {
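+        // Flatten the ordinal array into a byte[] in native byte order (short and int
+        // ordinals are converted element-wise).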
+        switch (getStorageForm())
+        {
+            case BYTE:
+                return bArrayOrNull.getAsFlatArray();
+            case SHORT:
+                return NativeData.shortToByte(sArrayOrNull.getAsFlatArray(), ByteOrder.NATIVE);
+            case INT:
+                return NativeData.intToByte(iArrayOrNull.getAsFlatArray(), ByteOrder.NATIVE);
+        }
+        throw new Error("Illegal storage form (" + getStorageForm() + ".)");
+    }
+
+    static HDF5EnumerationValueMDArray fromStorageForm(HDF5EnumerationType enumType, byte[] data,
+            int offset, int[] dimensions, int len)
+    {
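+        // Reverse of toStorageForm(): reconstruct the multi-dimensional ordinal array from a
+        // flat byte buffer, using the storage form recorded in the enumeration type.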
+        switch (enumType.getStorageForm())
+        {
+            case BYTE:
+                final byte[] subArray = new byte[len];
+                System.arraycopy(data, offset, subArray, 0, len);
+                return new HDF5EnumerationValueMDArray(enumType, new MDByteArray(subArray,
+                        dimensions));
+            case SHORT:
+                return new HDF5EnumerationValueMDArray(enumType, new MDShortArray(
+                        HDFNativeData.byteToShort(data, offset, len), dimensions));
+            case INT:
+                return new HDF5EnumerationValueMDArray(enumType, new MDIntArray(
+                        HDFNativeData.byteToInt(data, offset, len), dimensions));
+        }
+        throw new Error("Illegal storage form (" + enumType.getStorageForm() + ".)");
+    }
+
+    //
+    // Iterable
+    //
+
+    @Override
+    public Iterator<MDArray<String>.ArrayEntry> iterator()
+    {
+        return toStringArray().iterator();
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public String toString()
+    {
+        return toStringArray().toString();
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((bArrayOrNull == null) ? 0 : bArrayOrNull.hashCode());
+        result = prime * result + ((iArrayOrNull == null) ? 0 : iArrayOrNull.hashCode());
+        result = prime * result + ((sArrayOrNull == null) ? 0 : sArrayOrNull.hashCode());
+        result = prime * result + ((storageForm == null) ? 0 : storageForm.hashCode());
+        result = prime * result + ((type == null) ? 0 : type.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        HDF5EnumerationValueMDArray other = (HDF5EnumerationValueMDArray) obj;
+        if (bArrayOrNull == null)
+        {
+            if (other.bArrayOrNull != null)
+            {
+                return false;
+            }
+        } else if (false == bArrayOrNull.equals(other.bArrayOrNull))
+        {
+            return false;
+        }
+        if (iArrayOrNull == null)
+        {
+            if (other.iArrayOrNull != null)
+            {
+                return false;
+            }
+        } else if (false == iArrayOrNull.equals(other.iArrayOrNull))
+        {
+            return false;
+        }
+        if (sArrayOrNull == null)
+        {
+            if (other.sArrayOrNull != null)
+            {
+                return false;
+            }
+        } else if (false == sArrayOrNull.equals(other.sArrayOrNull))
+        {
+            return false;
+        }
+        if (storageForm != other.storageForm)
+        {
+            return false;
+        }
+        if (type == null)
+        {
+            if (other.type != null)
+            {
+                return false;
+            }
+        } else if (false == type.equals(other.type))
+        {
+            return false;
+        }
+        return true;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5Factory.java b/source/java/ch/systemsx/cisd/hdf5/HDF5Factory.java
new file mode 100644
index 0000000..f972dae
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5Factory.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+/**
+ * A static wrapper around the {@link IHDF5Factory} for creating writers and readers of HDF5
+ * files. For straightforward creation, see the methods {@link #open(File)} and
+ * {@link #openForReading(File)}. If
+ * you need full control over the creation process, see the methods {@link #configure(File)} and
+ * {@link #configureForReading(File)}.
+ * 
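+ * <p>
+ * A minimal usage sketch (the file name and data set path are made up for illustration; it
+ * assumes the typed {@code float32()} interface of {@link IHDF5Writer}):
+ * 
+ * <pre>
+ * IHDF5Writer writer = HDF5Factory.open("example.h5");
+ * try
+ * {
+ *     writer.float32().writeArray("/mydata", new float[] { 1f, 2f, 3f });
+ * } finally
+ * {
+ *     writer.close();
+ * }
+ * </pre>
+ * 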
+ * @author Bernd Rinn
+ */
+public final class HDF5Factory
+{
+
+    /**
+     * Opens an HDF5 <var>file</var> for writing and reading. If the file does not yet exist, it
+     * will be created.
+     */
+    public static IHDF5Writer open(File file)
+    {
+        return HDF5FactoryProvider.get().open(file);
+    }
+
+    /**
+     * Opens an HDF5 file named <var>filePath</var> for writing and reading. If the file does not
+     * yet exist, it will be created.
+     */
+    public static IHDF5Writer open(String filePath)
+    {
+        return HDF5FactoryProvider.get().open(new File(filePath));
+    }
+
+    /**
+     * Opens an HDF5 <var>file</var> for reading. It is an error if the file does not exist.
+     */
+    public static IHDF5Reader openForReading(File file)
+    {
+        return HDF5FactoryProvider.get().openForReading(file);
+    }
+
+    /**
+     * Opens an HDF5 file named <var>filePath</var> for reading. It is an error if the file does not
+     * exist.
+     */
+    public static IHDF5Reader openForReading(String filePath)
+    {
+        return HDF5FactoryProvider.get().openForReading(new File(filePath));
+    }
+
+    /**
+     * Opens a configurator for an HDF5 <var>file</var> for writing and reading. Configure the
+     * writer as you need and then call {@link IHDF5WriterConfigurator#writer()} in order to start
+     * reading and writing the file.
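+     * <p>
+     * A sketch ({@code overwrite()} stands in for whatever configurator settings you need):
+     * 
+     * <pre>
+     * IHDF5Writer writer = HDF5Factory.configure(new File("example.h5")).overwrite().writer();
+     * </pre>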
+     */
+    public static IHDF5WriterConfigurator configure(File file)
+    {
+        return HDF5FactoryProvider.get().configure(file);
+    }
+
+    /**
+     * Opens a configurator for an HDF5 file named <var>filePath</var> for writing and reading.
+     * Configure the writer as you need and then call {@link IHDF5WriterConfigurator#writer()} in
+     * order to start reading and writing the file.
+     */
+    public static IHDF5WriterConfigurator configure(String filePath)
+    {
+        return HDF5FactoryProvider.get().configure(new File(filePath));
+    }
+
+    /**
+     * Opens a configurator for an HDF5 <var>file</var> for reading. Configure the reader as you
+     * need and then call {@link IHDF5ReaderConfigurator#reader()} in order to start reading the
+     * file.
+     */
+    public static IHDF5ReaderConfigurator configureForReading(File file)
+    {
+        return HDF5FactoryProvider.get().configureForReading(file);
+    }
+
+    /**
+     * Opens a configurator for an HDF5 file named <var>filePath</var> for reading. Configure the
+     * reader as you need and then call {@link IHDF5ReaderConfigurator#reader()} in order to start
+     * reading the file.
+     */
+    public static IHDF5ReaderConfigurator configureForReading(String filePath)
+    {
+        return HDF5FactoryProvider.get().configureForReading(new File(filePath));
+    }
+
+    /**
+     * Returns <code>true</code> if the <var>file</var> is an HDF5 file and <code>false</code>
+     * otherwise.
+     */
+    public static boolean isHDF5File(File file)
+    {
+        return HDF5FactoryProvider.get().isHDF5File(file);
+    }
+
+    /**
+     * Returns <code>true</code> if the file named <var>filePath</var> is an HDF5 file and
+     * <code>false</code> otherwise.
+     */
+    public static boolean isHDF5File(String filePath)
+    {
+        return HDF5FactoryProvider.get().isHDF5File(new File(filePath));
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5FactoryProvider.java b/source/java/ch/systemsx/cisd/hdf5/HDF5FactoryProvider.java
new file mode 100644
index 0000000..a8f233d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5FactoryProvider.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+import ch.systemsx.cisd.hdf5.hdf5lib.H5F;
+
+/**
+ * Provides access to a factory for HDF5 readers and writers.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5FactoryProvider
+{
+    private static class HDF5Factory implements IHDF5Factory
+    {
+
+        @Override
+        public IHDF5WriterConfigurator configure(File file)
+        {
+            return new HDF5WriterConfigurator(file);
+        }
+
+        @Override
+        public IHDF5ReaderConfigurator configureForReading(File file)
+        {
+            return new HDF5ReaderConfigurator(file);
+        }
+
+        @Override
+        public IHDF5Writer open(File file)
+        {
+            return new HDF5WriterConfigurator(file).writer();
+        }
+
+        @Override
+        public IHDF5Reader openForReading(File file)
+        {
+            return new HDF5ReaderConfigurator(file).reader();
+        }
+
+        @Override
+        public boolean isHDF5File(File file)
+        {
+            return H5F.H5Fis_hdf5(file.getPath());
+        }
+
+    }
+
+    /**
+     * The (only) instance of the factory.
+     */
+    private static IHDF5Factory factory = new HDF5Factory();
+
+    private HDF5FactoryProvider()
+    {
+        // Not to be instantiated.
+    }
+
+    /**
+     * Returns the {@link IHDF5Factory}. This is the entry point for creating {@link IHDF5Reader}
+     * and {@link IHDF5Writer} instances.
+     */
+    public static synchronized IHDF5Factory get()
+    {
+        return factory;
+    }
+
+    /**
+     * Sets the {@link IHDF5Factory}. In normal operation this method is not needed; it is a hook
+     * for tracking or influencing the factory's operation, for example for mocking in unit tests.
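+     * <p>
+     * A hypothetical sketch for a unit test ({@code MockHDF5Factory} is an assumed test double,
+     * not part of this library):
+     * 
+     * <pre>
+     * IHDF5Factory original = HDF5FactoryProvider.get();
+     * HDF5FactoryProvider.set(new MockHDF5Factory());
+     * try
+     * {
+     *     // ... exercise code that creates readers/writers via HDF5FactoryProvider ...
+     * } finally
+     * {
+     *     HDF5FactoryProvider.set(original);
+     * }
+     * </pre>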
+     */
+    public static synchronized void set(IHDF5Factory factory)
+    {
+        HDF5FactoryProvider.factory = factory;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5FileLevelReadOnlyHandler.java b/source/java/ch/systemsx/cisd/hdf5/HDF5FileLevelReadOnlyHandler.java
new file mode 100644
index 0000000..6db0306
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5FileLevelReadOnlyHandler.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+/**
+ * Implementation of {@link IHDF5FileLevelReadOnlyHandler}.
+ *
+ * @author Bernd Rinn
+ */
+class HDF5FileLevelReadOnlyHandler implements IHDF5FileLevelReadOnlyHandler
+{
+    private final HDF5BaseReader baseReader;
+    
+    HDF5FileLevelReadOnlyHandler(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    @Override
+    public boolean isPerformNumericConversions()
+    {
+        return baseReader.performNumericConversions;
+    }
+    
+    @Override
+    public boolean isClosed()
+    {
+        return baseReader.isClosed();
+    }
+    
+    @Override
+    public String getHouseKeepingNameSuffix()
+    {
+        return baseReader.houseKeepingNameSuffix;
+    }
+    
+    @Override
+    public File getFile()
+    {
+        return baseReader.hdf5File;
+    }
+    
+    @Override
+    public void close()
+    {
+        baseReader.close();
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5FileLevelReadWriteHandler.java b/source/java/ch/systemsx/cisd/hdf5/HDF5FileLevelReadWriteHandler.java
new file mode 100644
index 0000000..8e93dba
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5FileLevelReadWriteHandler.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.Flushable;
+
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * Implementation of {@link IHDF5FileLevelReadWriteHandler}.
+ *
+ * @author Bernd Rinn
+ */
+final class HDF5FileLevelReadWriteHandler extends HDF5FileLevelReadOnlyHandler implements IHDF5FileLevelReadWriteHandler
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5FileLevelReadWriteHandler(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Configuration
+    // /////////////////////
+
+    @Override
+    public boolean isUseExtendableDataTypes()
+    {
+        return baseWriter.useExtentableDataTypes;
+    }
+
+    @Override
+    public FileFormat getFileFormat()
+    {
+        return baseWriter.fileFormat;
+    }
+
+    // /////////////////////
+    // File
+    // /////////////////////
+
+    @Override
+    public void flush()
+    {
+        baseWriter.checkOpen();
+        baseWriter.flush();
+    }
+
+    @Override
+    public void flushSyncBlocking()
+    {
+        baseWriter.checkOpen();
+        baseWriter.flushSyncBlocking();
+    }
+
+    @Override
+    public boolean addFlushable(Flushable flushable)
+    {
+        return baseWriter.addFlushable(flushable);
+    }
+
+    @Override
+    public boolean removeFlushable(Flushable flushable)
+    {
+        return baseWriter.removeFlushable(flushable);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5FloatReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5FloatReader.java
new file mode 100644
index 0000000..6d0dcad
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5FloatReader.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_FLOAT;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5FloatReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5FloatReader implements IHDF5FloatReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5FloatReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public float getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Float> getAttributeRunnable = new ICallableWithCleanUp<Float>()
+            {
+                @Override
+                public Float call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final float[] data =
+                            baseReader.h5.readAttributeAsFloatArray(attributeId, H5T_NATIVE_FLOAT, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public float[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<float[]> getAttributeRunnable =
+                new ICallableWithCleanUp<float[]>()
+                    {
+                        @Override
+                        public float[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getFloatArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDFloatArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDFloatArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDFloatArray>()
+                    {
+                        @Override
+                        public MDFloatArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getFloatMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public float[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDFloatArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public float read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Float> readCallable = new ICallableWithCleanUp<Float>()
+            {
+                @Override
+                public Float call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final float[] data = new float[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_FLOAT, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public float[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<float[]> readCallable = new ICallableWithCleanUp<float[]>()
+            {
+                @Override
+                public float[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readFloatArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private float[] readFloatArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final float[] data = new float[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_FLOAT, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readFloatArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private float[] readFloatArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
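+        // The data set is a scalar whose element is an HDF5 array type; read it through a
+        // scalar data space with a matching in-memory array type.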
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final float[] data = new float[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_FLOAT, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDFloatArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_FLOAT, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDFloatArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_FLOAT, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public float[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public float[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<float[]> readCallable = new ICallableWithCleanUp<float[]>()
+            {
+                @Override
+                public float[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final float[] data = new float[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_FLOAT, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public float[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDFloatArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public float[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDFloatArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public float[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDFloatArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDFloatArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
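+        // A slice fixes some coordinates via boundIndices; read a block that spans the
+        // remaining (free) dimensions completely and drop the bound dimensions from the result.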
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDFloatArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDFloatArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDFloatArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDFloatArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDFloatArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDFloatArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDFloatArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDFloatArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDFloatArray> readCallable = new ICallableWithCleanUp<MDFloatArray>()
+            {
+                @Override
+                public MDFloatArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readFloatMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDFloatArray readFloatMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final float[] data = new float[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_FLOAT, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDFloatArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readFloatMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDFloatArray readFloatMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_FLOAT, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final float[] data = new float[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDFloatArray(data, arrayDimensions);
+        } else
+        {
+            final float[] data =
+                    new float[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDFloatArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDFloatArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDFloatArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDFloatArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDFloatArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDFloatArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDFloatArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDFloatArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDFloatArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDFloatArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDFloatArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDFloatArray> readCallable = new ICallableWithCleanUp<MDFloatArray>()
+            {
+                @Override
+                public MDFloatArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final float[] dataBlock = new float[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_FLOAT,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDFloatArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDFloatArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // We do not support block-wise reading of array types; check that we do not have to,
+        // and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final float[] dataBlock =
+                new float[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_FLOAT, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDFloatArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<float[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
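+        // Lazily iterate over the data set in "natural" blocks (sized by the data set's
+        // layout); each call to next() reads one block from the file.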
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<float[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<float[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<float[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<float[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final float[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<float[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDFloatArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDFloatArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDFloatArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDFloatArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDFloatArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDFloatArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDFloatArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    float[] getFloatArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_FLOAT, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_FLOAT;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final float[] data =
+                baseReader.h5.readAttributeAsFloatArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDFloatArray getFloatMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_FLOAT,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_FLOAT;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final float[] data =
+                    baseReader.h5.readAttributeAsFloatArray(attributeId,
+                            memoryTypeId, len);
+            return new MDFloatArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5FloatStorageFeatures.java b/source/java/ch/systemsx/cisd/hdf5/HDF5FloatStorageFeatures.java
new file mode 100644
index 0000000..9977d16
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5FloatStorageFeatures.java
@@ -0,0 +1,869 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_FLOAT;
+
+/**
+ * An object representing the storage features that are to be used for a float data set.
+ * <p>
+ * The <code>..._KEEP</code> variants denote that the specified storage features should only be
+ * applied if a new data set has to be created. If the data set already exists, it will be kept with
+ * whatever storage features it has.
+ * <em>Note that this may lead to an exception if the existing data set is non-extendable and the 
+ * dimensions of the new data set differ from the dimensions of the existing data set.</em>
+ * <p>
+ * The <code>..._DELETE</code> variants denote that the specified storage features should always be
+ * applied. If the data set already exists, it will be deleted before the new data set is written.
+ * This is the default behavior. However, if the
+ * {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()} setting is given, the
+ * <code>..._DELETE</code> variant can be used to override this setting on a case-by-case basis.
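+ * <p>
+ * A minimal usage sketch (the <code>writer</code> and <code>data</code> variables and the data
+ * set path are hypothetical; <code>writeArray</code> is the method declared in
+ * {@link IHDF5FloatWriter}):
+ * 
+ * <pre>
+ * // Keep an existing data set as-is; deflate only if "/ds" has to be created:
+ * writer.writeArray("/ds", data, HDF5FloatStorageFeatures.FLOAT_DEFLATE_KEEP);
+ * // Delete and re-create "/ds" with deflation, even for a writer configured
+ * // with keepDataSetsIfTheyExist():
+ * writer.writeArray("/ds", data, HDF5FloatStorageFeatures.FLOAT_DEFLATE_DELETE);
+ * </pre>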
+ * <p>
+ * The available storage layouts are {@link HDF5StorageLayout#COMPACT},
+ * {@link HDF5StorageLayout#CONTIGUOUS} and {@link HDF5StorageLayout#CHUNKED}. Only
+ * {@link HDF5StorageLayout#CHUNKED} is extendable and can be compressed.
+ * <p>
+ * Two types of compressions are supported: <i>deflation</i> (the method used by <code>gzip</code>)
+ * and <i>scaling</i>, which can be used if the required accuracy of the values is smaller than
+ * what the atomic data type can store. <b>Note that <i>scaling</i> in general is a lossy compression</b>
+ * while <i>deflation</i> is always lossless. <i>Scaling</i> compression is only available with HDF5
+ * 1.8 and newer. Trying to use <i>scaling</i> in strict HDF5 1.6 compatibility mode will throw an
+ * {@link IllegalStateException}.
+ * <p>
+ * For <i>deflation</i> the deflation level can be chosen to get the right balance between speed of
+ * compression and compression ratio. Often the {@link #DEFAULT_DEFLATION_LEVEL} will be the right
+ * choice.
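+ * <p>
+ * For example, a minimal sketch (the deflation level 9 is illustrative only):
+ * 
+ * <pre>
+ * HDF5FloatStorageFeatures features = HDF5FloatStorageFeatures.createDeflation(9);
+ * </pre>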
+ * <p>
+ * For <i>scaling</i>, a scaling factor can be chosen that determines the accuracy of the saved
+ * values. For float values, the scaling factor determines the number of significant digits of the
+ * numbers. It is guaranteed that <code>{@literal |f_real - f_saved| < 10^(-scalingFactor)}</code>.
+ * The algorithm used for scale compression is:
+ * <ol>
+ * <li>Calculate the minimum value of all values</li>
+ * <li>Subtract the minimum value from all values</li>
+ * <li>Multiply all values obtained in step 2 with <code>{@literal 10^scalingFactor}</code></li>
+ * <li>Round the values obtained in step 3 to the nearest integer value</li>
+ * <li>Store the minimum found in step 1 and the values obtained in step 4</li>
+ * </ol>
+ * This algorithm is known as GRIB D-scaling.
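+ * <p>
+ * A worked numeric sketch of this algorithm (the values below are chosen purely for
+ * illustration), using <code>scalingFactor == 2</code> on the values <code>{ 1.23f, 4.56f }</code>:
+ * 
+ * <pre>
+ * final float min = 1.23f;                           // step 1: minimum of all values
+ * final int i1 = Math.round((1.23f - min) * 100f);   // steps 2-4: 0
+ * final int i2 = Math.round((4.56f - min) * 100f);   // steps 2-4: 333
+ * // step 5: store min and { 0, 333 }; reading back as min + i2 / 100f
+ * // recovers 4.56f to within the guaranteed 10^-2.
+ * </pre>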
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5FloatStorageFeatures extends HDF5AbstractStorageFeatures
+{
+
+    /**
+     * Represents 'no compression', using the default storage layout.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_NO_COMPRESSION =
+            new HDF5FloatStorageFeatures(null, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'no compression', using the default storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_NO_COMPRESSION_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'no compression', using the default storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_NO_COMPRESSION_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a compact storage layout.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_COMPACT = new HDF5FloatStorageFeatures(
+            HDF5StorageLayout.COMPACT, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a compact storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_COMPACT_KEEP = new HDF5FloatStorageFeatures(
+            HDF5StorageLayout.COMPACT, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a compact storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_COMPACT_DELETE =
+            new HDF5FloatStorageFeatures(HDF5StorageLayout.COMPACT,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a contiguous storage layout.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_CONTIGUOUS = new HDF5FloatStorageFeatures(
+            HDF5StorageLayout.CONTIGUOUS, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a contiguous storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_CONTIGUOUS_KEEP =
+            new HDF5FloatStorageFeatures(HDF5StorageLayout.CONTIGUOUS,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a contiguous storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_CONTIGUOUS_DELETE =
+            new HDF5FloatStorageFeatures(HDF5StorageLayout.CONTIGUOUS,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a chunked storage layout.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_CHUNKED = new HDF5FloatStorageFeatures(
+            HDF5StorageLayout.CHUNKED, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a chunked storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_CHUNKED_KEEP = new HDF5FloatStorageFeatures(
+            HDF5StorageLayout.CHUNKED, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a chunked storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_CHUNKED_DELETE =
+            new HDF5FloatStorageFeatures(HDF5StorageLayout.CHUNKED,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_DEFLATE = new HDF5FloatStorageFeatures(null,
+            DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level,
+     * combined with a shuffling pre-filter.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SHUFFLE_DEFLATE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.USE_WRITER_DEFAULT, true,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_DEFLATE_KEEP = new HDF5FloatStorageFeatures(
+            null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, DEFAULT_DEFLATION_LEVEL,
+            NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_DEFLATE_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_DEFLATE_MAX = new HDF5FloatStorageFeatures(
+            null, MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_DEFLATE_MAX_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_DEFLATE_MAX_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents scaling with scaling factor 1 for float values.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING1 = new HDF5FloatStorageFeatures(
+            null, NO_DEFLATION_LEVEL, (byte) 1);
+
+    /**
+     * Represents scaling with scaling factor 1 for float values.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING1_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, (byte) 1);
+
+    /**
+     * Represents scaling with scaling factor 1 for float values.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING1_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, (byte) 1);
+
+    /**
+     * Represents scaling with scaling factor 1 for float values combined with deflation using the
+     * default deflation level.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING1_DEFLATE =
+            new HDF5FloatStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, (byte) 1);
+
+    /**
+     * Represents scaling with scaling factor 1 for float values combined with deflation using the
+     * default deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING1_DEFLATE_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, (byte) 1);
+
+    /**
+     * Represents scaling with scaling factor 1 for float values combined with deflation using the
+     * default deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING1_DEFLATE_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, (byte) 1);
+
+    /**
+     * Represents scaling with scaling factor 2 for float values.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING2 = new HDF5FloatStorageFeatures(
+            null, NO_DEFLATION_LEVEL, (byte) 2);
+
+    /**
+     * Represents scaling with scaling factor 2 for float values.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING2_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, (byte) 2);
+
+    /**
+     * Represents scaling with scaling factor 2 for float values.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING2_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, (byte) 2);
+
+    /**
+     * Represents scaling with scaling factor 2 for float values combined with deflation using the
+     * default deflation level.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING2_DEFLATE =
+            new HDF5FloatStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, (byte) 2);
+
+    /**
+     * Represents scaling with scaling factor 2 for float values combined with deflation using the
+     * default deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING2_DEFLATE_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, (byte) 2);
+
+    /**
+     * Represents scaling with scaling factor 2 for float values combined with deflation using the
+     * default deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING2_DEFLATE_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, (byte) 2);
+
+    /**
+     * Represents scaling with scaling factor 3 for float values.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING3 = new HDF5FloatStorageFeatures(
+            null, NO_DEFLATION_LEVEL, (byte) 3);
+
+    /**
+     * Represents scaling with scaling factor 3 for float values.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING3_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, (byte) 3);
+
+    /**
+     * Represents scaling with scaling factor 3 for float values.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING3_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, (byte) 3);
+
+    /**
+     * Represents scaling with scaling factor 3 for float values combined with deflation using the
+     * default deflation level.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING3_DEFLATE =
+            new HDF5FloatStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, (byte) 3);
+
+    /**
+     * Represents scaling with scaling factor 3 for float values combined with deflation using the
+     * default deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING3_DEFLATE_KEEP =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, (byte) 3);
+
+    /**
+     * Represents scaling with scaling factor 3 for float values combined with deflation using the
+     * default deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5FloatStorageFeatures FLOAT_SCALING3_DEFLATE_DELETE =
+            new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, (byte) 3);
+
+    /**
+     * A builder for storage features.
+     */
+    public static final class HDF5FloatStorageFeatureBuilder extends
+            HDF5AbstractStorageFeatureBuilder
+    {
+        public HDF5FloatStorageFeatureBuilder()
+        {
+        }
+
+        public HDF5FloatStorageFeatureBuilder(HDF5AbstractStorageFeatures template)
+        {
+            super(template);
+        }
+
+        /**
+         * Compresses the dataset with the default deflation level if <code>compress==true</code>;
+         * does not compress if <code>compress==false</code>.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder compress(boolean compress)
+        {
+            super.compress(compress);
+            return this;
+        }
+
+        /**
+         * Compresses the dataset with the default deflation level.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder compress()
+        {
+            super.compress();
+            return this;
+        }
+
+        /**
+         * Compresses this dataset with the given <var>deflateLevel</var>.
+         * {@link #NO_DEFLATION_LEVEL} means: do not compress. A good default value is
+         * {@link #DEFAULT_DEFLATION_LEVEL}; the maximum supported value is
+         * {@link #MAX_DEFLATION_LEVEL}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder deflateLevel(byte deflateLevel)
+        {
+            super.deflateLevel(deflateLevel);
+            return this;
+        }
+
+        /**
+         * Sets the scaling factor for an integer scaling pre-filter.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder scalingFactor(byte scalingFactor)
+        {
+            super.scalingFactor(scalingFactor);
+            return this;
+        }
+        
+        /**
+         * Disables the scaling pre-filter.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder noScaling()
+        {
+            super.noScaling();
+            return this;
+        }
+
+        /**
+         * Sets a shuffling pre-filter for deflation if <code>shuffleBeforeDeflate==true</code> and
+         * disables it if <code>shuffleBeforeDeflate==false</code>. The shuffling pre-filter may
+         * improve the compression ratio but may also increase the compression time.
+         * <p>
+         * Only takes effect if compression is switched on.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder shuffleBeforeDeflate(boolean shuffleBeforeDeflate)
+        {
+            super.shuffleBeforeDeflate(shuffleBeforeDeflate);
+            return this;
+        }
+
+        /**
+         * Sets a shuffling pre-filter for deflation. This may improve the compression ratio but
+         * may also increase the compression time.
+         * <p>
+         * Only takes effect if compression is switched on.
+         * 
+         * @see #compress()
+         * @see #deflateLevel(byte)
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder shuffleBeforeDeflate()
+        {
+            super.shuffleBeforeDeflate();
+            return this;
+        }
+
+        /**
+         * Set the layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder storageLayout(HDF5StorageLayout proposedLayout)
+        {
+            super.storageLayout(proposedLayout);
+            return this;
+        }
+
+        /**
+         * Set a compact layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder compactStorageLayout()
+        {
+            super.compactStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set a contiguous layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder contiguousStorageLayout()
+        {
+            super.contiguousStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set a chunked layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder chunkedStorageLayout()
+        {
+            super.chunkedStorageLayout();
+            return this;
+        }
+
+        /**
+         * Let a heuristic choose the right layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder defaultStorageLayout()
+        {
+            super.defaultStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder datasetReplacementPolicy(
+                DataSetReplacementPolicy datasetReplacementPolicy)
+        {
+            super.datasetReplacementPolicy(datasetReplacementPolicy);
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#USE_WRITER_DEFAULT}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder datasetReplacementUseWriterDefault()
+        {
+            super.datasetReplacementUseWriterDefault();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#ENFORCE_KEEP_EXISTING}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder datasetReplacementEnforceKeepExisting()
+        {
+            super.datasetReplacementEnforceKeepExisting();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#ENFORCE_REPLACE_WITH_NEW}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5FloatStorageFeatureBuilder datasetReplacementEnforceReplaceWithNew()
+        {
+            super.datasetReplacementEnforceReplaceWithNew();
+            return this;
+        }
+
+        /**
+         * Returns the storage features corresponding to this builder's values.
+         */
+        @Override
+        public HDF5FloatStorageFeatures features()
+        {
+            return new HDF5FloatStorageFeatures(this);
+        }
+    }
+
+    /**
+     * Returns a new storage feature builder.
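+     * <p>
+     * A minimal usage sketch (compression with the default deflation level combined with an
+     * example scaling factor of 2; the values are illustrative):
+     * 
+     * <pre>
+     * final HDF5FloatStorageFeatures features =
+     *         HDF5FloatStorageFeatures.build().compress().scalingFactor((byte) 2).features();
+     * </pre>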
+     */
+    public static HDF5FloatStorageFeatureBuilder build()
+    {
+        return new HDF5FloatStorageFeatureBuilder();
+    }
+
+    /**
+     * Returns a new storage feature builder, initializing from <var>template</var>.
+     */
+    public static HDF5FloatStorageFeatureBuilder build(HDF5AbstractStorageFeatures template)
+    {
+        return new HDF5FloatStorageFeatureBuilder(template);
+    }
+
+    /**
+     * Creates a corresponding {@link HDF5FloatStorageFeatures} for the given
+     * {@link HDF5GenericStorageFeatures}.
+     */
+    public static HDF5FloatStorageFeatures createFromGeneric(
+            HDF5GenericStorageFeatures storageFeatures)
+    {
+        if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_CHUNKED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED_DELETE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_CHUNKED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED_KEEP)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_CHUNKED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_COMPACT;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT_DELETE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_COMPACT_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT_KEEP)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_COMPACT_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_DELETE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_KEEP)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION_DELETE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION_KEEP)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_DEFLATE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_DELETE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_DEFLATE_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_KEEP)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_DEFLATE_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_DEFLATE_MAX;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX_DELETE)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_DEFLATE_MAX_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX_KEEP)
+        {
+            return HDF5FloatStorageFeatures.FLOAT_DEFLATE_MAX_KEEP;
+        } else
+        {
+            return new HDF5FloatStorageFeatures(storageFeatures.tryGetProposedLayout(),
+                    storageFeatures.getDatasetReplacementPolicy(),
+                    storageFeatures.getDeflateLevel(), NO_SCALING_FACTOR);
+        }
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     */
+    public static HDF5FloatStorageFeatures createDeflation(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, DataSetReplacementPolicy.USE_WRITER_DEFAULT);
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5FloatStorageFeatures createDeflationKeep(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING);
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static HDF5FloatStorageFeatures createDeflationDelete(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW);
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     */
+    private static HDF5FloatStorageFeatures createDeflation(int deflationLevel,
+            DataSetReplacementPolicy dataSetReplacementPolicy)
+    {
+        return new HDF5FloatStorageFeatures(null, dataSetReplacementPolicy, toByte(deflationLevel),
+                NO_SCALING_FACTOR);
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents float scaling with the
+     * given <var>scalingFactor</var>.
+     */
+    public static HDF5FloatStorageFeatures createFloatScaling(int scalingFactor)
+    {
+        return new HDF5FloatStorageFeatures(null, NO_DEFLATION_LEVEL, toByte(scalingFactor));
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents float scaling with the
+     * given <var>scalingFactor</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5FloatStorageFeatures createFloatScalingKeep(int scalingFactor)
+    {
+        return new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                NO_DEFLATION_LEVEL, toByte(scalingFactor));
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the default
+     * deflation level and float scaling with the given <var>scalingFactor</var>.
+     */
+    public static HDF5FloatStorageFeatures createDeflateAndFloatScaling(int scalingFactor)
+    {
+        return new HDF5FloatStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, toByte(scalingFactor));
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the default
+     * deflation level and float scaling with the given <var>scalingFactor</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5FloatStorageFeatures createDeflateAndFloatScalingKeep(int scalingFactor)
+    {
+        return new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                DEFAULT_DEFLATION_LEVEL, toByte(scalingFactor));
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and float scaling with the given <var>scalingFactor</var>.
+     */
+    public static HDF5FloatStorageFeatures createDeflateAndFloatScaling(int deflateLevel,
+            int scalingFactor)
+    {
+        return new HDF5FloatStorageFeatures(null, toByte(deflateLevel), toByte(scalingFactor));
+    }
+
+    /**
+     * Creates a {@link HDF5FloatStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and float scaling with the given <var>scalingFactor</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5FloatStorageFeatures createDeflateAndFloatScalingKeep(int deflateLevel,
+            int scalingFactor)
+    {
+        return new HDF5FloatStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                toByte(deflateLevel), toByte(scalingFactor));
+    }
+
+    HDF5FloatStorageFeatures(HDF5StorageLayout proposedLayoutOrNull, byte deflateLevel,
+            byte scalingFactor)
+    {
+        this(proposedLayoutOrNull, DataSetReplacementPolicy.USE_WRITER_DEFAULT, deflateLevel,
+                scalingFactor);
+    }
+
+    HDF5FloatStorageFeatures(HDF5StorageLayout proposedLayoutOrNull,
+            DataSetReplacementPolicy dataSetReplacementPolicy, byte deflateLevel, byte scalingFactor)
+    {
+        super(proposedLayoutOrNull, dataSetReplacementPolicy, deflateLevel, scalingFactor);
+    }
+
+    HDF5FloatStorageFeatures(HDF5FloatStorageFeatureBuilder builder)
+    {
+        super(builder.getStorageLayout(), builder.getDatasetReplacementPolicy(), builder
+                .isShuffleBeforeDeflate(), builder.getDeflateLevel(), builder.getScalingFactor());
+    }
+
+    HDF5FloatStorageFeatures(HDF5StorageLayout proposedLayoutOrNull,
+            DataSetReplacementPolicy dataSetReplacementPolicy, boolean shuffleBeforeDeflate,
+            byte deflateLevel, byte scalingFactor)
+    {
+        super(proposedLayoutOrNull, dataSetReplacementPolicy, shuffleBeforeDeflate, deflateLevel,
+                scalingFactor);
+    }
+
+    /**
+     * Returns <code>true</code> if this compression setting can be applied to the given <var>dataClassId</var>.
+     */
+    @Override
+    boolean isCompatibleWithDataClass(int dataClassId)
+    {
+        return (dataClassId == H5T_FLOAT);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5FloatWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5FloatWriter.java
new file mode 100644
index 0000000..8ef0844
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5FloatWriter.java
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_FLOAT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F32LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5FloatWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5FloatWriter extends HDF5FloatReader implements IHDF5FloatWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5FloatWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final float value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_IEEE_F32LE,
+                                        H5T_NATIVE_FLOAT, dataSpaceId, new float[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_IEEE_F32LE,
+                                        H5T_NATIVE_FLOAT, -1, new float[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final float[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_IEEE_F32LE, H5T_NATIVE_FLOAT,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_FLOAT, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_IEEE_F32LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDFloatArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_IEEE_F32LE, H5T_NATIVE_FLOAT,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_FLOAT, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_IEEE_F32LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final float[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDFloatArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final float value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_IEEE_F32LE, H5T_NATIVE_FLOAT, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final float[] data)
+    {
+        writeArray(objectPath, data, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final float[] data,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                H5T_IEEE_F32LE, new long[]
+                                { data.length }, 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F32LE, 
+                            features, new long[] { 0 }, new long[] { size }, 4, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F32LE, 
+                            features, new long[] { size }, null, 4, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_IEEE_F32LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 4, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final float[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final float[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_FLOAT, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final float[][] data)
+    {
+        writeMatrix(objectPath, data, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final float[][] data, 
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDFloatArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final float[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDFloatArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final float[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDFloatArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final float[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDFloatArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDFloatArray data)
+    {
+        writeMDArray(objectPath, data, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDFloatArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDFloatArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDFloatArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDFloatArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDFloatArray data,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_IEEE_F32LE, 
+                                    data.longDimensions(), 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, FLOAT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
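+                        // Chunked storage: create the data set with zero initial extent and
+                        // use the requested dimensions as the chunk size, so the data set
+                        // stays extendable as blocks are written.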
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F32LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 4, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_IEEE_F32LE, 
+                                features, MDArray.toLong(dimensions), null, 4, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5FloatStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_IEEE_F32LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 4, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDFloatArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
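+        // Block numbers address whole blocks: the element offset of block i along each
+        // axis is blockNumber[i] * blockDimensions[i] (here, the dimensions of 'data').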
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDFloatArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDFloatArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDFloatArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
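+                    // The data set must reach offset + block size in each dimension;
+                    // openAndExtendDataSet() below grows it if necessary.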
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_FLOAT, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDFloatArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDFloatArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDFloatArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
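+                    // Write a sub-block of 'data': select the target hyperslab in the file
+                    // space at 'offset' and the source hyperslab in the memory space at
+                    // 'memoryOffset'; H5Dwrite copies between the two selections.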
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_FLOAT, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5GenericStorageFeatures.java b/source/java/ch/systemsx/cisd/hdf5/HDF5GenericStorageFeatures.java
new file mode 100644
index 0000000..c2e0f05
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5GenericStorageFeatures.java
@@ -0,0 +1,546 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+/**
+ * An object representing the storage features that are to be used for a data set.
+ * <p>
+ * The <code>..._KEEP</code> variants denote that the specified storage features should only be
+ * applied if a new data set has to be created. If the data set already exists, it will be kept with
+ * whatever storage features it has.
+ * <em>Note that this may lead to an exception if the existing data set is non-extendable and the 
+ * dimensions of the new data set differ from the dimensions of the existing data set.</em>
+ * <p>
+ * The <code>..._DELETE</code> variants denote that the specified storage features should always be
+ * applied. If the data set already exists, it will be deleted before the new data set is written.
+ * This is the default behavior. However, if the
+ * {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()} setting is given, the
+ * <code>..._DELETE</code> variant can be used to override this setting on a case-by-case basis.
+ * <p>
+ * The available storage layouts are {@link HDF5StorageLayout#COMPACT},
+ * {@link HDF5StorageLayout#CONTIGUOUS} and {@link HDF5StorageLayout#CHUNKED}. Only
+ * {@link HDF5StorageLayout#CHUNKED} is extendable and can be compressed.
+ * <p>
+ * For generic (that is non-integer and non-float) data sets only one type of compression is
+ * supported, which is <i>deflation</i>, the method used by <code>gzip</code>. The deflation level
+ * can be chosen to get the right balance between speed of compression and compression ratio. Often
+ * the {@link #DEFAULT_DEFLATION_LEVEL} will be the right choice.
+ * 
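+ * As an illustration (the write call is a placeholder, not a method of this class), a caller
+ * passes one of the constants defined here to a write method that accepts generic storage
+ * features:
+ * <pre>
+ * // Deflate a newly created data set, but keep an existing one untouched.
+ * writer.writeSomething("/path/to/dataSet", data, HDF5GenericStorageFeatures.GENERIC_DEFLATE_KEEP);
+ * </pre>
+ * 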
+ * @author Bernd Rinn
+ */
+public final class HDF5GenericStorageFeatures extends HDF5AbstractStorageFeatures
+{
+    /**
+     * Represents 'no compression'.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_NO_COMPRESSION =
+            new HDF5GenericStorageFeatures(null, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'no compression'.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_NO_COMPRESSION_KEEP =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'no compression'.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_NO_COMPRESSION_DELETE =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents a compact storage layout.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_COMPACT =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.COMPACT, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a compact storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_COMPACT_KEEP =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.COMPACT,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a compact storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_COMPACT_DELETE =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.COMPACT,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a contiguous storage layout.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_CONTIGUOUS =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.CONTIGUOUS, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a contiguous storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_CONTIGUOUS_KEEP =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.CONTIGUOUS,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a contiguous storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_CONTIGUOUS_DELETE =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.CONTIGUOUS,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a chunked storage layout.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_CHUNKED =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.CHUNKED, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a chunked storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_CHUNKED_KEEP =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.CHUNKED,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents a chunked storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_CHUNKED_DELETE =
+            new HDF5GenericStorageFeatures(HDF5StorageLayout.CHUNKED,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_DEFLATE =
+            new HDF5GenericStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression' with a pre-filter shuffle, that is deflation with the
+     * default deflation level.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_SHUFFLE_DEFLATE =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.USE_WRITER_DEFAULT, true,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_DEFLATE_KEEP =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_DEFLATE_DELETE =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_DEFLATE_MAX =
+            new HDF5GenericStorageFeatures(null, MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression' with a pre-filter shuffle, that is deflation with the
+     * maximal deflation level.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_SHUFFLE_DEFLATE_MAX =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.USE_WRITER_DEFAULT, true,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_DEFLATE_MAX_KEEP =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5GenericStorageFeatures GENERIC_DEFLATE_MAX_DELETE =
+            new HDF5GenericStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR);
+
+    /**
+     * Creates a {@link HDF5GenericStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     */
+    public static HDF5GenericStorageFeatures createDeflation(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, false);
+    }
+
+    /**
+     * Creates a {@link HDF5GenericStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5GenericStorageFeatures createDeflationKeep(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, true);
+    }
+
+    /**
+     * Creates a {@link HDF5GenericStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     */
+    private static HDF5GenericStorageFeatures createDeflation(int deflationLevel,
+            boolean keepDataSetIfExists)
+    {
+        if (deflationLevel == NO_DEFLATION_LEVEL)
+        {
+            return keepDataSetIfExists ? GENERIC_NO_COMPRESSION_KEEP : GENERIC_NO_COMPRESSION;
+        } else if (deflationLevel == DEFAULT_DEFLATION_LEVEL)
+        {
+            return keepDataSetIfExists ? GENERIC_DEFLATE_KEEP : GENERIC_DEFLATE;
+        } else if (deflationLevel == MAX_DEFLATION_LEVEL)
+        {
+            return keepDataSetIfExists ? GENERIC_DEFLATE_MAX_KEEP : GENERIC_DEFLATE_MAX;
+        } else
+        {
+            return new HDF5GenericStorageFeatures(null, getDataSetReplacementPolicy(
+                    keepDataSetIfExists, false), toByte(deflationLevel), NO_SCALING_FACTOR);
+        }
+    }
+
+    /**
+     * Legacy method for specifying the compression as a boolean value.
+     */
+    static HDF5GenericStorageFeatures getCompression(boolean deflate)
+    {
+        return deflate ? GENERIC_DEFLATE : GENERIC_NO_COMPRESSION;
+    }
+
+    /**
+     * A builder for storage features.
+     */
+    public static final class HDF5GenericStorageFeatureBuilder extends
+            HDF5AbstractStorageFeatureBuilder
+    {
+        public HDF5GenericStorageFeatureBuilder()
+        {
+        }
+
+        public HDF5GenericStorageFeatureBuilder(HDF5AbstractStorageFeatures template)
+        {
+            super(template);
+            noScaling();
+        }
+
+        /**
+         * Compresses the dataset with the default deflation level if <code>compress==true</code>;
+         * does not compress if <code>compress==false</code>.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder compress(boolean compress)
+        {
+            super.compress(compress);
+            return this;
+        }
+
+        /**
+         * Compresses the dataset with the default deflation level.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder compress()
+        {
+            super.compress();
+            return this;
+        }
+
+        /**
+         * Compresses this dataset with the given <var>deflateLevel</var>.
+         * {@link #NO_DEFLATION_LEVEL} means: do not compress. A good default value is
+         * {@link #DEFAULT_DEFLATION_LEVEL}; the maximum value supported is
+         * {@link #MAX_DEFLATION_LEVEL}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder deflateLevel(byte deflateLevel)
+        {
+            super.deflateLevel(deflateLevel);
+            return this;
+        }
+
+        /**
+         * Sets a shuffling pre-filter for deflation if <code>shuffleBeforeDeflate==true</code> and
+         * disables it if <code>shuffleBeforeDeflate==false</code>. The shuffling pre-filter may
+         * improve the compression ratio but may also increase the compression time.
+         * <p>
+         * Only takes effect if compression is switched on.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder shuffleBeforeDeflate(boolean shuffleBeforeDeflate)
+        {
+            super.shuffleBeforeDeflate(shuffleBeforeDeflate);
+            return this;
+        }
+
+        /**
+         * Sets a shuffling pre-filter for deflation. This may improve the compression ratio but may
+         * also increase the compression time.
+         * <p>
+         * Only takes effect if compression is switched on.
+         * 
+         * @see #compress()
+         * @see #deflateLevel(byte)
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder shuffleBeforeDeflate()
+        {
+            super.shuffleBeforeDeflate();
+            return this;
+        }
+
+        /**
+         * Set the layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder storageLayout(HDF5StorageLayout proposedLayout)
+        {
+            super.storageLayout(proposedLayout);
+            return this;
+        }
+
+        /**
+         * Set a compact layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder compactStorageLayout()
+        {
+            super.compactStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set a contiguous layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder contiguousStorageLayout()
+        {
+            super.contiguousStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set a chunked layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder chunkedStorageLayout()
+        {
+            super.chunkedStorageLayout();
+            return this;
+        }
+
+        /**
+         * Let a heuristic choose the right layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder defaultStorageLayout()
+        {
+            super.defaultStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder datasetReplacementPolicy(
+                DataSetReplacementPolicy datasetReplacementPolicy)
+        {
+            super.datasetReplacementPolicy(datasetReplacementPolicy);
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#USE_WRITER_DEFAULT}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder datasetReplacementUseWriterDefault()
+        {
+            super.datasetReplacementUseWriterDefault();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#ENFORCE_KEEP_EXISTING}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder datasetReplacementEnforceKeepExisting()
+        {
+            super.datasetReplacementEnforceKeepExisting();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#ENFORCE_REPLACE_WITH_NEW}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5GenericStorageFeatureBuilder datasetReplacementEnforceReplaceWithNew()
+        {
+            super.datasetReplacementEnforceReplaceWithNew();
+            return this;
+        }
+
+        /**
+         * Returns the storage features corresponding to this builder's values.
+         */
+        @Override
+        public HDF5GenericStorageFeatures features()
+        {
+            return new HDF5GenericStorageFeatures(this);
+        }
+    }
+
+    /**
+     * Returns a new storage feature builder.
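+     * <p>
+     * A minimal sketch of builder usage (the deflation level is an arbitrary example value):
+     * <pre>
+     * HDF5GenericStorageFeatures features = HDF5GenericStorageFeatures.build()
+     *         .deflateLevel((byte) 6)
+     *         .datasetReplacementEnforceKeepExisting()
+     *         .features();
+     * </pre>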
+     */
+    public static HDF5GenericStorageFeatureBuilder build()
+    {
+        return new HDF5GenericStorageFeatureBuilder();
+    }
+
+    /**
+     * Returns a new storage feature builder, initializing from <var>template</var>.
+     */
+    public static HDF5GenericStorageFeatureBuilder build(HDF5AbstractStorageFeatures template)
+    {
+        return new HDF5GenericStorageFeatureBuilder(template);
+    }
+
+    HDF5GenericStorageFeatures(HDF5GenericStorageFeatureBuilder builder)
+    {
+        super(builder.getStorageLayout(), builder.getDatasetReplacementPolicy(), builder
+                .isShuffleBeforeDeflate(), builder.getDeflateLevel(), builder.getScalingFactor());
+    }
+
+    HDF5GenericStorageFeatures(HDF5StorageLayout proposedLayoutOrNull, byte deflateLevel,
+            byte scalingFactor)
+    {
+        this(proposedLayoutOrNull, DataSetReplacementPolicy.USE_WRITER_DEFAULT, deflateLevel,
+                scalingFactor);
+    }
+
+    HDF5GenericStorageFeatures(HDF5StorageLayout proposedLayoutOrNull,
+            DataSetReplacementPolicy dataSetReplacementPolicy, byte deflateLevel, byte scalingFactor)
+    {
+        super(proposedLayoutOrNull, dataSetReplacementPolicy, deflateLevel, scalingFactor);
+    }
+
+    HDF5GenericStorageFeatures(HDF5StorageLayout proposedLayoutOrNull,
+            DataSetReplacementPolicy dataSetReplacementPolicy, boolean shuffleBeforeDeflate,
+            byte deflateLevel, byte scalingFactor)
+    {
+        super(proposedLayoutOrNull, dataSetReplacementPolicy, shuffleBeforeDeflate, deflateLevel,
+                scalingFactor);
+    }
+
+    /**
+     * Returns <code>true</code> if this compression setting can be applied to the given <var>dataClassId</var>.
+     */
+    @Override
+    boolean isCompatibleWithDataClass(int dataClassId)
+    {
+        return true;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5IntReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5IntReader.java
new file mode 100644
index 0000000..d824590
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5IntReader.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT32;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5IntReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5IntReader implements IHDF5IntReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5IntReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For Unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public int getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Integer> getAttributeRunnable = new ICallableWithCleanUp<Integer>()
+            {
+                @Override
+                public Integer call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final int[] data =
+                            baseReader.h5.readAttributeAsIntArray(attributeId, H5T_NATIVE_INT32, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public int[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> getAttributeRunnable =
+                new ICallableWithCleanUp<int[]>()
+                    {
+                        @Override
+                        public int[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getIntArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDIntArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDIntArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDIntArray>()
+                    {
+                        @Override
+                        public MDIntArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getIntMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public int[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDIntArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public int read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Integer> readCallable = new ICallableWithCleanUp<Integer>()
+            {
+                @Override
+                public Integer call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int[] data = new int[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT32, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readIntArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private int[] readIntArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final int[] data = new int[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT32, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readIntArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private int[] readIntArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
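+        // Fallback for a scalar data set whose element type is an HDF5 array type: take
+        // the dimensions from the data type and read the value through a scalar data space
+        // with a matching in-memory array type.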
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int[] data = new int[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT32, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDIntArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT32, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDIntArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT32, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public int[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final int[] data = new int[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT32, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDIntArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public int[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDIntArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public int[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDIntArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDIntArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
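+        // A slice fixes the bound indices to single coordinates and reads the full extent
+        // of the remaining (free) dimensions; the result is reshaped to the free dimensions.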
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDIntArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDIntArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDIntArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDIntArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDIntArray> readCallable = new ICallableWithCleanUp<MDIntArray>()
+            {
+                @Override
+                public MDIntArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readIntMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDIntArray readIntMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final int[] data = new int[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT32, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDIntArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readIntMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDIntArray readIntMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT32, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final int[] data = new int[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDIntArray(data, arrayDimensions);
+        } else
+        {
+            final int[] data =
+                    new int[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDIntArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDIntArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDIntArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDIntArray> readCallable = new ICallableWithCleanUp<MDIntArray>()
+            {
+                @Override
+                public MDIntArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final int[] dataBlock = new int[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT32,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDIntArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDIntArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // We do not support block-wise reading of array types; check that
+        // we do not have to read block-wise here and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final int[] dataBlock =
+                new int[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT32, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDIntArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<int[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<int[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<int[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<int[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<int[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final int[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<int[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDIntArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDIntArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDIntArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDIntArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDIntArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDIntArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDIntArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    int[] getIntArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_INT32, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_INT32;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final int[] data =
+                baseReader.h5.readAttributeAsIntArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDIntArray getIntMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_INT32,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_INT32;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final int[] data =
+                    baseReader.h5.readAttributeAsIntArray(attributeId,
+                            memoryTypeId, len);
+            return new MDIntArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5IntStorageFeatures.java b/source/java/ch/systemsx/cisd/hdf5/HDF5IntStorageFeatures.java
new file mode 100644
index 0000000..4366f2d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5IntStorageFeatures.java
@@ -0,0 +1,1231 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_INTEGER;
+
+/**
+ * An object representing the storage features that are to be used for an integer data set.
+ * <p>
+ * The <code>..._KEEP</code> variants denote that the specified storage features should only be
+ * applied if a new data set has to be created. If the data set already exists, it will be kept with
+ * whatever storage features it has.
+ * <em>Note that this may lead to an exception if the existing data set is non-extendable and the 
+ * dimensions of the new data set differ from the dimensions of the existing data set.</em>
+ * <p>
+ * The <code>..._DELETE</code> variants denote that the specified storage features should always be
+ * applied. If the data set already exists, it will be deleted before the new data set is written.
+ * This is the default behavior. However, if the
+ * {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()} setting is given, the
+ * <code>..._DELETE</code> variant can be used to override this setting on a case-by-case basis.
+ * <p>
+ * The available storage layouts are {@link HDF5StorageLayout#COMPACT},
+ * {@link HDF5StorageLayout#CONTIGUOUS} and {@link HDF5StorageLayout#CHUNKED}. Only
+ * {@link HDF5StorageLayout#CHUNKED} is extendable and can be compressed.
+ * <p>
+ * Two types of compression are supported: <i>deflation</i> (the method used by <code>gzip</code>)
+ * and <i>scaling</i>, which can be used if the required accuracy of the values is smaller than
+ * what the atomic data type can store. <b>Note that <i>scaling</i> can be a lossy compression</b>
+ * while <i>deflation</i> is always lossless. <i>Scaling</i> compression is only available with
+ * HDF5 1.8 and newer. Trying to use <i>scaling</i> in strict HDF5 1.6 compatibility mode will
+ * throw an {@link IllegalStateException}.
+ * <p>
+ * For <i>deflation</i> the deflation level can be chosen to get the right balance between speed of
+ * compression and compression ratio. Often the {@link #DEFAULT_DEFLATION_LEVEL} will be the right
+ * choice.
+ * <p>
+ * For <i>scaling</i>, a scaling factor can be chosen that determines the accuracy of the saved
+ * values. For float values, the scaling factor determines the number of significant digits of the
+ * numbers. The algorithm used for scale compression is:
+ * <ol>
+ * <li>Calculate the minimum value of all values</li>
+ * <li>Subtract the minimum value from all values</li>
+ * <li>Store the values using the number of bits specified as <code>scalingFactor</code></li>
+ * </ol>
+ * Note that this compression is lossless if
+ * <code>{@literal scalingFactor >= ceil(log2(max(values) - min(values) + 1))}</code>. This is
+ * ensured when using {@link #INT_AUTO_SCALING}, thus {@link #INT_AUTO_SCALING} is always lossless.
+ * <p>
+ * <b>Contrary to float scaling compression, a lossy integer scaling compression is usually an
+ * error as the most significant bits are chopped off!</b> The option to specify the scaling factor
+ * is meant to give you a way to exploit the fact that you <i>know</i> the span of the values
+ * <code>{@literal max(values) - min(values)}</code>, rather than asking the library to waste time
+ * computing it for you.
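+ * <p>
+ * As a minimal usage sketch (hedged: it assumes an open <code>IHDF5Writer</code> named
+ * <code>writer</code>, e.g. obtained via <code>HDF5Factory.open(...)</code>, and a made-up data
+ * set path), a feature object from this class is passed along with the data to a write call:
+ * 
+ * <pre>
+ * writer.int32().writeArray("/group/myIntArray", new int[]
+ *     { 1, 2, 3 }, HDF5IntStorageFeatures.INT_DEFLATE);
+ * </pre>
+ * 
+ * Using {@link #INT_DEFLATE_KEEP} instead would leave an already existing data set untouched.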
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5IntStorageFeatures extends HDF5AbstractStorageFeatures
+{
+
+    /**
+     * Perform an automatic scaling on integer data.
+     */
+    private final static byte INTEGER_AUTO_SCALING_FACTOR = 0;
+
+    /**
+     * Represents 'no compression', signed integers, use default storage layout.
+     */
+    public static final HDF5IntStorageFeatures INT_NO_COMPRESSION = new HDF5IntStorageFeatures(
+            null, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'no compression', unsigned integers, use default storage layout.
+     */
+    public static final HDF5IntStorageFeatures INT_NO_COMPRESSION_UNSIGNED =
+            new HDF5IntStorageFeatures(null, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'no compression', use default storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_NO_COMPRESSION_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'no compression', unsigned integers, use default storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_NO_COMPRESSION_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'no compression', use default storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_NO_COMPRESSION_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'no compression', unsigned integers, use default storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_NO_COMPRESSION_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a compact storage layout.
+     */
+    public static final HDF5IntStorageFeatures INT_COMPACT = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.COMPACT, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a compact storage layout with unsigned integers.
+     */
+    public static final HDF5IntStorageFeatures INT_COMPACT_UNSIGNED = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.COMPACT, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a compact storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_COMPACT_KEEP = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.COMPACT, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a compact storage layout with unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_COMPACT_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.COMPACT,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a compact storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_COMPACT_DELETE = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.COMPACT, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a compact storage layout with unsigned integers.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_COMPACT_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.COMPACT,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a contiguous storage layout.
+     */
+    public static final HDF5IntStorageFeatures INT_CONTIGUOUS = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CONTIGUOUS, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a contiguous storage layout with unsigned integers.
+     */
+    public static final HDF5IntStorageFeatures INT_CONTIGUOUS_UNSIGNED =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.CONTIGUOUS, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a contiguous storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_CONTIGUOUS_KEEP = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CONTIGUOUS, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a contiguous storage layout with unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_CONTIGUOUS_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.CONTIGUOUS,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a contiguous storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_CONTIGUOUS_DELETE = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CONTIGUOUS, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a contiguous storage layout with unsigned integers.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_CONTIGUOUS_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.CONTIGUOUS,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a chunked (extendable) storage layout.
+     */
+    public static final HDF5IntStorageFeatures INT_CHUNKED = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CHUNKED, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a chunked (extendable) storage layout with unsigned integers.
+     */
+    public static final HDF5IntStorageFeatures INT_CHUNKED_UNSIGNED = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CHUNKED, NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a chunked (extendable) storage layout.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_CHUNKED_KEEP = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CHUNKED, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a chunked (extendable) storage layout with unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_CHUNKED_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.CHUNKED,
+                    DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents a chunked (extendable) storage layout.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_CHUNKED_DELETE = new HDF5IntStorageFeatures(
+            HDF5StorageLayout.CHUNKED, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+            NO_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents a chunked (extendable) storage layout with unsigned integers.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_CHUNKED_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(HDF5StorageLayout.CHUNKED,
+                    DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, NO_DEFLATION_LEVEL,
+                    NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE = new HDF5IntStorageFeatures(null,
+            DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'standard compression' with a pre-filter shuffle, that is deflation with the
+     * default deflation level.
+     */
+    public static final HDF5IntStorageFeatures INT_SHUFFLE_DEFLATE = new HDF5IntStorageFeatures(
+            null, DataSetReplacementPolicy.USE_WRITER_DEFAULT, true, DEFAULT_DEFLATION_LEVEL,
+            NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'standard compression' with unsigned integers, that is deflation with the default
+     * deflation level.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_UNSIGNED = new HDF5IntStorageFeatures(
+            null, DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_KEEP = new HDF5IntStorageFeatures(null,
+            DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, DEFAULT_DEFLATION_LEVEL,
+            NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'standard compression' with unsigned integers, that is deflation with the default
+     * deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'standard compression', that is deflation with the default deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_DELETE = new HDF5IntStorageFeatures(
+            null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, DEFAULT_DEFLATION_LEVEL,
+            NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'standard compression' with unsigned integers, that is deflation with the default
+     * deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_MAX = new HDF5IntStorageFeatures(null,
+            MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'maximal compression' with unsigned integers, that is deflation with the maximal
+     * deflation level.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_MAX_UNSIGNED =
+            new HDF5IntStorageFeatures(null, MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_MAX_KEEP = new HDF5IntStorageFeatures(
+            null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, MAX_DEFLATION_LEVEL,
+            NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'maximal compression' with unsigned integers, that is deflation with the maximal
+     * deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_MAX_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents 'maximal compression', that is deflation with the maximal deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_MAX_DELETE = new HDF5IntStorageFeatures(
+            null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW, MAX_DEFLATION_LEVEL,
+            NO_SCALING_FACTOR, true);
+
+    /**
+     * Represents 'maximal compression' with unsigned integers, that is deflation with the maximal
+     * deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_DEFLATE_MAX_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    MAX_DEFLATION_LEVEL, NO_SCALING_FACTOR, false);
+
+    /**
+     * Represents automatic scaling for integer values.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING = new HDF5IntStorageFeatures(null,
+            NO_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, true);
+
+    /**
+     * Represents automatic scaling for integer values with unsigned integers.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_UNSIGNED =
+            new HDF5IntStorageFeatures(null, NO_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, false);
+
+    /**
+     * Represents automatic scaling for integer values.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_KEEP = new HDF5IntStorageFeatures(
+            null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING, NO_DEFLATION_LEVEL,
+            INTEGER_AUTO_SCALING_FACTOR, true);
+
+    /**
+     * Represents automatic scaling for integer values with unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    NO_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, false);
+
+    /**
+     * Represents automatic scaling for integer values.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, true);
+
+    /**
+     * Represents automatic scaling for integer values with unsigned integers.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    NO_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, false);
+
+    /**
+     * Represents automatic scaling for integer values combined with deflation with the default
+     * deflation level.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DEFLATE =
+            new HDF5IntStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR,
+                    true);
+
+    /**
+     * Represents automatic scaling for integer values combined with deflation with the default
+     * deflation level, using unsigned integers.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DEFLATE_UNSIGNED =
+            new HDF5IntStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR,
+                    false);
+
+    /**
+     * Represents automatic scaling for integer values combined with deflation with the default
+     * deflation level.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DEFLATE_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, true);
+
+    /**
+     * Represents automatic scaling for integer values combined with deflation with the default
+     * deflation level, using unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DEFLATE_UNSIGNED_KEEP =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                    DEFAULT_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, false);
+
+    /**
+     * Represents automatic scaling for integer values combined with deflation with the default
+     * deflation level.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DEFLATE_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, true);
+
+    /**
+     * Represents automatic scaling for integer values combined with deflation with the default
+     * deflation level, using unsigned integers.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows overriding the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static final HDF5IntStorageFeatures INT_AUTO_SCALING_DEFLATE_UNSIGNED_DELETE =
+            new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_REPLACE_WITH_NEW,
+                    DEFAULT_DEFLATION_LEVEL, INTEGER_AUTO_SCALING_FACTOR, false);
+
+    /**
+     * A builder for storage features.
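+     * <p>
+     * A minimal sketch of how such a builder may be used (the particular combination of settings
+     * is made up for illustration):
+     * 
+     * <pre>
+     * HDF5IntStorageFeatures features = HDF5IntStorageFeatures.build().unsigned()
+     *         .storageLayout(HDF5StorageLayout.CHUNKED).compress().features();
+     * </pre>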
+     */
+    public static final class HDF5IntStorageFeatureBuilder extends
+            HDF5AbstractStorageFeatureBuilder
+    {
+        private boolean signed = true;
+
+        public HDF5IntStorageFeatureBuilder()
+        {
+        }
+
+        public HDF5IntStorageFeatureBuilder(HDF5AbstractStorageFeatures template)
+        {
+            super(template);
+            if (template instanceof HDF5IntStorageFeatures)
+            {
+                signed(((HDF5IntStorageFeatures) template).isSigned());
+            }
+        }
+
+        boolean isSigned()
+        {
+            return signed;
+        }
+
+        /**
+         * Sets whether the integer values should be stored as signed integers
+         * (<code>signed==true</code>) or as unsigned integers (<code>signed==false</code>).
+         */
+        public HDF5IntStorageFeatureBuilder signed(@SuppressWarnings("hiding")
+        boolean signed)
+        {
+            this.signed = signed;
+            return this;
+        }
+
+        /**
+         * Sets the integer values to be stored as unsigned integers.
+         */
+        public HDF5IntStorageFeatureBuilder unsigned()
+        {
+            this.signed = false;
+            return this;
+        }
+
+        /**
+         * Compresses the dataset with the default deflation level if <code>compress==true</code>;
+         * does not compress if <code>compress==false</code>.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder compress(boolean compress)
+        {
+            super.compress(compress);
+            return this;
+        }
+
+        /**
+         * Compress the dataset with default deflation level.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder compress()
+        {
+            super.compress();
+            return this;
+        }
+
+        /**
+         * Compresses this dataset with the given <var>deflateLevel</var>.
+         * {@link #NO_DEFLATION_LEVEL} means: do not compress. A good default value is
+         * {@link #DEFAULT_DEFLATION_LEVEL}, the maximum value supported is
+         * {@link #MAX_DEFLATION_LEVEL}.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder deflateLevel(byte deflateLevel)
+        {
+            super.deflateLevel(deflateLevel);
+            return this;
+        }
+
+        /**
+         * Sets the scaling factor for an integer scaling pre-filter.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder scalingFactor(byte scalingFactor)
+        {
+            super.scalingFactor(scalingFactor);
+            return this;
+        }
+
+        /**
+         * Disables the scaling pre-filter.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder noScaling()
+        {
+            super.noScaling();
+            return this;
+        }
+
+        /**
+         * Sets a shuffling pre-filter for deflation if <code>shuffleBeforeDeflate==true</code> and
+         * disables it if <code>shuffleBeforeDeflate==false</code>. The shuffling pre-filter may
+         * improve the compression level but may also increase the compression time.
+         * <p>
+         * Only takes effect if compression is switched on.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder shuffleBeforeDeflate(boolean shuffleBeforeDeflate)
+        {
+            super.shuffleBeforeDeflate(shuffleBeforeDeflate);
+            return this;
+        }
+
+        /**
+         * Sets a shuffling pre-filter for deflation. This may improve the compression level but may
+         * also increase the compression time.
+         * <p>
+         * Only takes effect if compression is switched on.
+         * 
+         * @see #compress()
+         * @see #deflateLevel(byte)
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder shuffleBeforeDeflate()
+        {
+            super.shuffleBeforeDeflate();
+            return this;
+        }
+
+        /**
+         * Set the layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder storageLayout(HDF5StorageLayout proposedLayout)
+        {
+            super.storageLayout(proposedLayout);
+            return this;
+        }
+
+        /**
+         * Set a compact layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder compactStorageLayout()
+        {
+            super.compactStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set a contiguous layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder contiguousStorageLayout()
+        {
+            super.contiguousStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set a chunked layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder chunkedStorageLayout()
+        {
+            super.chunkedStorageLayout();
+            return this;
+        }
+
+        /**
+         * Let a heuristic choose the right layout for the dataset.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5AbstractStorageFeatureBuilder defaultStorageLayout()
+        {
+            super.defaultStorageLayout();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets.
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder datasetReplacementPolicy(
+                DataSetReplacementPolicy datasetReplacementPolicy)
+        {
+            super.datasetReplacementPolicy(datasetReplacementPolicy);
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#USE_WRITER_DEFAULT}
+         * .
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder datasetReplacementUseWriterDefault()
+        {
+            super.datasetReplacementUseWriterDefault();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#ENFORCE_KEEP_EXISTING}
+         * .
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder datasetReplacementEnforceKeepExisting()
+        {
+            super.datasetReplacementEnforceKeepExisting();
+            return this;
+        }
+
+        /**
+         * Set the dataset replacement policy for existing datasets to
+         * {@link ch.systemsx.cisd.hdf5.HDF5AbstractStorageFeatures.DataSetReplacementPolicy#ENFORCE_REPLACE_WITH_NEW}
+         * .
+         * 
+         * @return This builder.
+         */
+        @Override
+        public HDF5IntStorageFeatureBuilder datasetReplacementEnforceReplaceWithNew()
+        {
+            super.datasetReplacementEnforceReplaceWithNew();
+            return this;
+        }
+
+        /**
+         * Returns the storage features corresponding to this builder's values.
+         */
+        @Override
+        public HDF5IntStorageFeatures features()
+        {
+            return new HDF5IntStorageFeatures(this);
+        }
+    }
+
+    /**
+     * Returns a new storage feature builder.
+     */
+    public static HDF5IntStorageFeatureBuilder build()
+    {
+        return new HDF5IntStorageFeatureBuilder();
+    }
+
+    /**
+     * Returns a new storage feature builder, initializing from <var>template</var>.
+     */
+    public static HDF5IntStorageFeatureBuilder build(HDF5AbstractStorageFeatures template)
+    {
+        return new HDF5IntStorageFeatureBuilder(template);
+    }
+
+    /**
+     * Create a corresponding {@link HDF5IntStorageFeatures} for the given
+     * {@link HDF5GenericStorageFeatures}.
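+     * <p>
+     * For example, <code>createFromGeneric(HDF5GenericStorageFeatures.GENERIC_DEFLATE)</code>
+     * yields {@link #INT_DEFLATE}.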
+     */
+    public static HDF5IntStorageFeatures createFromGeneric(
+            HDF5GenericStorageFeatures storageFeatures)
+    {
+        if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED)
+        {
+            return HDF5IntStorageFeatures.INT_CHUNKED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_CHUNKED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_CHUNKED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT)
+        {
+            return HDF5IntStorageFeatures.INT_COMPACT;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_COMPACT_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_COMPACT_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS)
+        {
+            return HDF5IntStorageFeatures.INT_CONTIGUOUS;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_CONTIGUOUS_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_CONTIGUOUS_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION)
+        {
+            return HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_NO_COMPRESSION_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_NO_COMPRESSION_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_MAX;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_MAX_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_MAX_KEEP;
+        } else
+        {
+            return new HDF5IntStorageFeatures(storageFeatures.tryGetProposedLayout(),
+                    storageFeatures.getDatasetReplacementPolicy(),
+                    storageFeatures.getDeflateLevel(), NO_SCALING_FACTOR, true);
+        }
+    }
+
+    /**
+     * Create a corresponding {@link HDF5IntStorageFeatures} for the given
+     * {@link HDF5GenericStorageFeatures}.
+     */
+    public static HDF5IntStorageFeatures createUnsignedFromGeneric(
+            HDF5GenericStorageFeatures storageFeatures)
+    {
+        if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED)
+        {
+            return HDF5IntStorageFeatures.INT_CHUNKED_UNSIGNED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_CHUNKED_UNSIGNED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CHUNKED_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_CHUNKED_UNSIGNED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT)
+        {
+            return HDF5IntStorageFeatures.INT_COMPACT_UNSIGNED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_COMPACT_UNSIGNED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_COMPACT_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_COMPACT_UNSIGNED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS)
+        {
+            return HDF5IntStorageFeatures.INT_CONTIGUOUS_UNSIGNED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_CONTIGUOUS_UNSIGNED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_CONTIGUOUS_UNSIGNED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION)
+        {
+            return HDF5IntStorageFeatures.INT_NO_COMPRESSION_UNSIGNED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_NO_COMPRESSION_UNSIGNED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_NO_COMPRESSION_UNSIGNED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_UNSIGNED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_UNSIGNED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_UNSIGNED_KEEP;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_MAX_UNSIGNED;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX_DELETE)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_MAX_UNSIGNED_DELETE;
+        } else if (storageFeatures == HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX_KEEP)
+        {
+            return HDF5IntStorageFeatures.INT_DEFLATE_MAX_UNSIGNED_KEEP;
+        } else
+        {
+            return new HDF5IntStorageFeatures(storageFeatures.tryGetProposedLayout(),
+                    storageFeatures.getDatasetReplacementPolicy(),
+                    storageFeatures.getDeflateLevel(), NO_SCALING_FACTOR, false);
+        }
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
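+     * <p>
+     * A usage sketch (hedged: <code>writer</code>, <code>path</code> and <code>data</code> are
+     * assumptions for illustration, not part of this class):
+     * 
+     * <pre>
+     * writer.int32().writeArray(path, data, HDF5IntStorageFeatures.createDeflation(9));
+     * </pre>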
+     */
+    public static HDF5IntStorageFeatures createDeflation(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, false, false, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createDeflationKeep(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, true, false, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var>.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows to overwrite the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static HDF5IntStorageFeatures createDeflationDelete(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, false, true, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var> for unsigned integers.
+     */
+    public static HDF5IntStorageFeatures createDeflationUnsigned(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, false, false, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var> for unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createDeflationUnsignedKeep(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, true, false, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var> for unsigned integers.
+     * <p>
+     * Delete an existing data set before writing the new one. Always apply the chosen settings.
+     * This allows to overwrite the {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()}
+     * setting.
+     */
+    public static HDF5IntStorageFeatures createDeflationUnsignedDelete(int deflationLevel)
+    {
+        return createDeflation(deflationLevel, false, true, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflationLevel</var> for unsigned integers.
+     */
+    private static HDF5IntStorageFeatures createDeflation(int deflationLevel,
+            boolean keepDataSetIfExists, boolean deleteDataSetIfExists, boolean signed)
+    {
+        return new HDF5IntStorageFeatures(null, getDataSetReplacementPolicy(
+                keepDataSetIfExists, deleteDataSetIfExists), toByte(deflationLevel),
+                NO_SCALING_FACTOR, signed);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents integer scaling with the
+     * given <var>scalingFactor</var>.
+     */
+    public static HDF5IntStorageFeatures createIntegerScaling(int scalingFactor)
+    {
+        return createIntegerScaling(scalingFactor, false, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents integer scaling with the
+     * given <var>scalingFactor</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createIntegerScalingKeep(int scalingFactor)
+    {
+        return createIntegerScaling(scalingFactor, true, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents integer scaling with the
+     * given <var>scalingFactor</var> for unsigned integers.
+     */
+    public static HDF5IntStorageFeatures createIntegerScalingUnsigned(int scalingFactor)
+    {
+        return createIntegerScaling(scalingFactor, false, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents integer scaling with the
+     * given <var>scalingFactor</var> for unsigned integers.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createIntegerScalingUnsignedKeep(int scalingFactor)
+    {
+        return createIntegerScaling(scalingFactor, true, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents integer scaling with the
+     * given <var>scalingFactor</var>.
+     */
+    private static HDF5IntStorageFeatures createIntegerScaling(int scalingFactor,
+            boolean keepExistingDataSetIfExists, boolean signed)
+    {
+        if (signed)
+        {
+            if (scalingFactor == INTEGER_AUTO_SCALING_FACTOR)
+            {
+                return keepExistingDataSetIfExists ? INT_AUTO_SCALING_DEFLATE_KEEP
+                        : INT_AUTO_SCALING_DEFLATE;
+            } else
+            {
+                return new HDF5IntStorageFeatures(null, NO_DEFLATION_LEVEL, toByte(scalingFactor),
+                        true);
+            }
+        } else
+        {
+            if (scalingFactor == INTEGER_AUTO_SCALING_FACTOR)
+            {
+                return keepExistingDataSetIfExists ? INT_AUTO_SCALING_DEFLATE_UNSIGNED_KEEP
+                        : INT_AUTO_SCALING_DEFLATE_UNSIGNED;
+            } else
+            {
+                return new HDF5IntStorageFeatures(null, NO_DEFLATION_LEVEL, toByte(scalingFactor),
+                        false);
+            }
+        }
+    }
+
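+    // A sketch for illustration (not part of the imported source): how the two branches
+    // above are typically exercised, assuming the INTEGER_AUTO_SCALING_FACTOR constant is
+    // visible to the caller.
+    //
+    //   // Explicit scaling factor: store signed values with 8 significant bits.
+    //   HDF5IntStorageFeatures explicit = HDF5IntStorageFeatures.createIntegerScaling(8);
+    //   // Auto-scaling: the library chooses the factor and combines it with deflation.
+    //   HDF5IntStorageFeatures autoScaled =
+    //           HDF5IntStorageFeatures.createIntegerScaling(INTEGER_AUTO_SCALING_FACTOR);
+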
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the default
+     * deflation level and integer scaling with the given <var>scalingFactor</var>.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScaling(int scalingFactor)
+    {
+        return new HDF5IntStorageFeatures(null, DEFAULT_DEFLATION_LEVEL, toByte(scalingFactor),
+                true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the default
+     * deflation level and integer scaling with the given <var>scalingFactor</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScalingKeep(int scalingFactor)
+    {
+        return new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                DEFAULT_DEFLATION_LEVEL, toByte(scalingFactor), true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and integer scaling with the given <var>scalingFactor</var>.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScaling(int deflateLevel,
+            byte scalingFactor)
+    {
+        return new HDF5IntStorageFeatures(null, toByte(deflateLevel), scalingFactor, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and integer scaling with the given <var>scalingFactor</var>, for
+     * unsigned integers.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScalingUnsigned(int deflateLevel,
+            byte scalingFactor)
+    {
+        return new HDF5IntStorageFeatures(null, toByte(deflateLevel), scalingFactor, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and integer scaling with the given <var>scalingFactor</var>. If
+     * <var>keepDataSetIfExists</var> is <code>true</code>, an existing data set is kept and the
+     * settings are applied only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScaling(int deflateLevel,
+            byte scalingFactor, boolean keepDataSetIfExists)
+    {
+        return new HDF5IntStorageFeatures(null, getDataSetReplacementPolicy(keepDataSetIfExists,
+                false), toByte(deflateLevel), scalingFactor, true);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and integer scaling with the given <var>scalingFactor</var>, for
+     * unsigned integers. If <var>keepDataSetIfExists</var> is <code>true</code>, an existing
+     * data set is kept and the settings are applied only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScalingUnsigned(int deflateLevel,
+            byte scalingFactor, boolean keepDataSetIfExists)
+    {
+        return new HDF5IntStorageFeatures(null, getDataSetReplacementPolicy(keepDataSetIfExists,
+                false), toByte(deflateLevel), scalingFactor, false);
+    }
+
+    /**
+     * Creates a {@link HDF5IntStorageFeatures} object that represents deflation with the given
+     * <var>deflateLevel</var> and integer scaling with the given <var>scalingFactor</var>.
+     * <p>
+     * Keep existing data set and apply only if a new data set has to be created.
+     */
+    public static HDF5IntStorageFeatures createDeflateAndIntegerScalingKeep(int deflateLevel,
+            byte scalingFactor)
+    {
+        return new HDF5IntStorageFeatures(null, DataSetReplacementPolicy.ENFORCE_KEEP_EXISTING,
+                toByte(deflateLevel), scalingFactor, true);
+    }
+
+    private final boolean signed;
+
+    HDF5IntStorageFeatures(HDF5IntStorageFeatureBuilder builder)
+    {
+        super(builder.getStorageLayout(), builder.getDatasetReplacementPolicy(), builder
+                .isShuffleBeforeDeflate(), builder.getDeflateLevel(), builder.getScalingFactor());
+        this.signed = builder.isSigned();
+    }
+
+    HDF5IntStorageFeatures(HDF5StorageLayout proposedLayoutOrNull, byte deflateLevel,
+            byte scalingFactor, boolean signed)
+    {
+        this(proposedLayoutOrNull, DataSetReplacementPolicy.USE_WRITER_DEFAULT, deflateLevel,
+                scalingFactor, signed);
+    }
+
+    HDF5IntStorageFeatures(HDF5StorageLayout proposedLayoutOrNull,
+            DataSetReplacementPolicy dataSetReplacementPolicy, byte deflateLevel,
+            byte scalingFactor, boolean signed)
+    {
+        super(proposedLayoutOrNull, dataSetReplacementPolicy, deflateLevel, scalingFactor);
+        this.signed = signed;
+    }
+
+    HDF5IntStorageFeatures(HDF5StorageLayout proposedLayoutOrNull,
+            DataSetReplacementPolicy dataSetReplacementPolicy, boolean shuffleBeforeDeflate,
+            byte deflateLevel, byte scalingFactor, boolean signed)
+    {
+        super(proposedLayoutOrNull, dataSetReplacementPolicy, shuffleBeforeDeflate, deflateLevel,
+                scalingFactor);
+        this.signed = signed;
+    }
+
+    /**
+     * Returns <code>true</code> if signed integers should be stored, <code>false</code> otherwise.
+     */
+    public boolean isSigned()
+    {
+        return signed;
+    }
+
+    /**
+     * Returns <code>true</code> if this compression setting can be applied to the given
+     * <var>dataClassId</var>.
+     */
+    @Override
+    boolean isCompatibleWithDataClass(int dataClassId)
+    {
+        return (dataClassId == H5T_INTEGER);
+    }
+
+}
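
A minimal usage sketch for the factory methods above (illustrative, not part of the
imported sources; it assumes the HDF5Factory entry point and the int32() quantity
interface of IHDF5Writer from this library):

    final IHDF5Writer writer = HDF5Factory.open("example.h5");
    try
    {
        // Deflate at level 6, but keep an existing data set if one is already there.
        writer.int32().writeArray("/data/counts", new int[] { 1, 2, 3 },
                HDF5IntStorageFeatures.createDeflationKeep(6));
    } finally
    {
        writer.close();
    }
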
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5IntWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5IntWriter.java
new file mode 100644
index 0000000..64b335f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5IntWriter.java
@@ -0,0 +1,703 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT32;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I32LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U32LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5IntWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5IntWriter extends HDF5IntReader implements IHDF5IntWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5IntWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final int value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I32LE,
+                                        H5T_NATIVE_INT32, dataSpaceId, new int[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I32LE,
+                                        H5T_NATIVE_INT32, -1, new int[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final int[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I32LE, H5T_NATIVE_INT32,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT32, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I32LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDIntArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I32LE, H5T_NATIVE_INT32,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT32, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I32LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final int[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDIntArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final int value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_I32LE, H5T_NATIVE_INT32, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final int[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final int[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, new long[]
+                                { data.length }, 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT32, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                            features, new long[] { 0 }, new long[] { size }, 4, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                            features, new long[] { size }, null, 4, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 4, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final int[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final int[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT32, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
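+    // A sketch for illustration (not part of the imported source): block number n passed to
+    // writeArrayBlock() above lands at offset n * data.length, and the data set is extended
+    // to offset + dataSize as needed. Assuming a writer for this quantity interface:
+    //
+    //   final int[] block0 = new int[1024], block1 = new int[1024];
+    //   writer.createArray("/ds", 0L, 1024);        // extendable array, chunk size 1024
+    //   writer.writeArrayBlock("/ds", block0, 0L);  // writes elements [0, 1024)
+    //   writer.writeArrayBlock("/ds", block1, 1L);  // writes elements [1024, 2048)
+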
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final int[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final int[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDIntArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final int[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDIntArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final int[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final int[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDIntArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDIntArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDIntArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDIntArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                                    data.longDimensions(), 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT32, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 4, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                                features, MDArray.toLong(dimensions), null, 4, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 4, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDIntArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDIntArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDIntArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDIntArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT32, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDIntArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT32, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
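
A short sketch of the block-wise MD array protocol implemented above (illustrative, not
part of the imported sources; it assumes the HDF5Factory entry point and the int32()
quantity interface of IHDF5Writer):

    // Create an extendable 2D data set with 64x64 chunks, then write block (1, 2),
    // which covers rows [64, 128) and columns [128, 192).
    final IHDF5Writer writer = HDF5Factory.open("blocks.h5");
    try
    {
        writer.int32().createMDArray("/image", new long[] { 0, 0 }, new int[] { 64, 64 });
        final MDIntArray block = new MDIntArray(new int[] { 64, 64 });
        writer.int32().writeMDArrayBlock("/image", block, new long[] { 1, 2 });
    } finally
    {
        writer.close();
    }
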
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5LinkInformation.java b/source/java/ch/systemsx/cisd/hdf5/HDF5LinkInformation.java
new file mode 100644
index 0000000..7a87fdc
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5LinkInformation.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+/**
+ * Information about a link in an HDF5 file.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5LinkInformation extends HDF5CommonInformation
+{
+    static final HDF5LinkInformation ROOT_LINK_INFO =
+            new HDF5LinkInformation("/", HDF5ObjectType.GROUP, null);
+
+    private final String symbolicLinkTargetOrNull;
+
+    private HDF5LinkInformation(String path, HDF5ObjectType type, String symbolicLinkTargetOrNull)
+    {
+        super(path, type);
+        this.symbolicLinkTargetOrNull = symbolicLinkTargetOrNull;
+    }
+
+    static HDF5LinkInformation create(String path, int typeId, String symbolicLinkTargetOrNull)
+    {
+        final HDF5ObjectType type = objectTypeIdToObjectType(typeId);
+        return new HDF5LinkInformation(path, type, symbolicLinkTargetOrNull);
+    }
+
+    /**
+     * Returns the symbolic link target of this link, or <code>null</code>, if this link does not
+     * exist or is not a symbolic link.
+     * <p>
+     * Note that external links have a special format: they start with the prefix
+     * "<code>EXTERNAL::</code>", followed by the path of the external file (beware that this
+     * part uses the native path separator, i.e. "\" on Windows). Finally, separated by
+     * "<code>::</code>", comes the path of the link within the external file (this part always
+     * uses "/" as the path separator).
+     */
+    public String tryGetSymbolicLinkTarget()
+    {
+        return symbolicLinkTargetOrNull;
+    }
+
+    /**
+     * Returns <code>true</code>, if the link is a soft link.
+     */
+    public boolean isSoftLink()
+    {
+        return HDF5ObjectType.isSoftLink(type);
+    }
+
+    /**
+     * Returns <code>true</code>, if the link is an external link.
+     */
+    public boolean isExternalLink()
+    {
+        return HDF5ObjectType.isExternalLink(type);
+    }
+
+    /**
+     * Returns <code>true</code>, if the link is either a soft link or an external link.
+     */
+    public boolean isSymbolicLink()
+    {
+        return HDF5ObjectType.isSymbolicLink(type);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((path == null) ? 0 : path.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final HDF5LinkInformation other = (HDF5LinkInformation) obj;
+        if (path == null)
+        {
+            if (other.path != null)
+            {
+                return false;
+            }
+        } else if (path.equals(other.path) == false)
+        {
+            return false;
+        }
+        return true;
+    }
+
+}
\ No newline at end of file
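
The external-link format documented in tryGetSymbolicLinkTarget() can be taken apart as
follows (a hypothetical helper for illustration, not part of the imported sources):

    /** Splits "EXTERNAL::<file path>::<path in file>" into its two parts, or returns null. */
    static String[] tryParseExternalLinkTarget(String targetOrNull)
    {
        final String prefix = "EXTERNAL::";
        if (targetOrNull == null || targetOrNull.startsWith(prefix) == false)
        {
            return null; // not an external link
        }
        final int idx = targetOrNull.indexOf("::", prefix.length());
        if (idx < 0)
        {
            return null; // malformed target
        }
        return new String[]
            { targetOrNull.substring(prefix.length(), idx), targetOrNull.substring(idx + 2) };
    }
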
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5LongReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5LongReader.java
new file mode 100644
index 0000000..f00c12e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5LongReader.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT64;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5LongReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5LongReader implements IHDF5LongReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5LongReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For Unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public long getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Long> getAttributeRunnable = new ICallableWithCleanUp<Long>()
+            {
+                @Override
+                public Long call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final long[] data =
+                            baseReader.h5.readAttributeAsLongArray(attributeId, H5T_NATIVE_INT64, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public long[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> getAttributeRunnable =
+                new ICallableWithCleanUp<long[]>()
+                    {
+                        @Override
+                        public long[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getLongArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDLongArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDLongArray>()
+                    {
+                        @Override
+                        public MDLongArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getLongMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public long[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDLongArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public long read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Long> readCallable = new ICallableWithCleanUp<Long>()
+            {
+                @Override
+                public Long call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final long[] data = new long[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readLongArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private long[] readLongArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final long[] data = new long[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readLongArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private long[] readLongArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final long[] data = new long[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT64, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDLongArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array.
+                            getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDLongArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public long[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final long[] data = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDLongArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public long[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDLongArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public long[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDLongArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDLongArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDLongArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDLongArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDLongArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
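+    // A slicing sketch with a hypothetical path: for a rank-3 data set, binding index 0
+    // to 5 yields the rank-2 slice [5, *, *]. The two calls below are assumed equivalent;
+    // in the long[] form, -1 is assumed to mark a free index, and IndexMap.bind is
+    // assumed to bind one index to a value.
+    //
+    //   final MDLongArray sliceA = readMDArraySlice("/ds3d", new long[] { 5, -1, -1 });
+    //   final MDLongArray sliceB = readMDArraySlice("/ds3d", new IndexMap().bind(0, 5));
+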
+    @Override
+    public MDLongArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+            {
+                @Override
+                public MDLongArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readLongMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDLongArray readLongMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final long[] data = new long[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDLongArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readLongMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDLongArray readLongMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT64, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final long[] data = new long[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDLongArray(data, arrayDimensions);
+        } else
+        {
+            final long[] data =
+                    new long[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDLongArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDLongArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
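+    // The block-number overloads above map block numbers to element offsets as
+    // offset[i] = blockNumber[i] * blockDimensions[i]; e.g. block (2, 1) with block
+    // dimensions (10, 20) starts at element offset (20, 20).
+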
+    @Override
+    public MDLongArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDLongArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+            {
+                @Override
+                public MDLongArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final long[] dataBlock = new long[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDLongArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDLongArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array type data sets is not supported. Check that the
+        // requested block covers the full array dimensions and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final long[] dataBlock =
+                new long[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT64, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDLongArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<long[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<long[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<long[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<long[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<long[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final long[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<long[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDLongArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDLongArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDLongArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDLongArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDLongArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDLongArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
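+    // An iteration sketch with a hypothetical data set name, processing a large data set
+    // chunk by chunk instead of reading it whole (process(...) is a placeholder):
+    //
+    //   for (HDF5MDDataBlock<MDLongArray> block : getMDArrayNaturalBlocks("/big"))
+    //   {
+    //       process(block.getData(), block.getOffset());
+    //   }
+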
+    long[] getLongArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_INT64, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_INT64;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final long[] data =
+                baseReader.h5.readAttributeAsLongArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDLongArray getLongMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_INT64,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_INT64;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final long[] data =
+                    baseReader.h5.readAttributeAsLongArray(attributeId,
+                            memoryTypeId, len);
+            return new MDLongArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5LongWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5LongWriter.java
new file mode 100644
index 0000000..4ab93f0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5LongWriter.java
@@ -0,0 +1,703 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5LongWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5LongWriter extends HDF5LongReader implements IHDF5LongWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5LongWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final long value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I64LE,
+                                        H5T_NATIVE_INT64, dataSpaceId, new long[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I64LE,
+                                        H5T_NATIVE_INT64, -1, new long[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final long[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I64LE, H5T_NATIVE_INT64,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT64, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I64LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDLongArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I64LE, H5T_NATIVE_INT64,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT64, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I64LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final long[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDLongArray(value));
+    }
+    
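+    // A short sketch of the attribute setters above, with hypothetical object path,
+    // attribute names and values:
+    //
+    //   setAttr("/ds", "version", 3L);
+    //   setArrayAttr("/ds", "shape", new long[] { 100, 200 });
+    //   setMatrixAttr("/ds", "transform", new long[][] { { 1, 0 }, { 0, 1 } });
+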
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final long value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_I64LE, H5T_NATIVE_INT64, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final long[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final long[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, new long[]
+                                { data.length }, 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                            features, new long[] { 0 }, new long[] { size }, 8, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                            features, new long[] { size }, null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final long[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
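+    // A block-wise writing sketch with hypothetical names and sizes: create an extendable
+    // array with block size 1000, then append blocks. Full blocks must match the creation
+    // block size; a trailing partial block would use writeArrayBlockWithOffset instead.
+    //
+    //   createArray("/stream", 0, 1000);
+    //   writeArrayBlock("/stream", firstThousand, 0);   // long[1000], elements [0, 1000)
+    //   writeArrayBlock("/stream", secondThousand, 1);  // long[1000], elements [1000, 2000)
+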
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final long[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final long[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDLongArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final long[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDLongArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final long[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final long[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDLongArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDLongArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDLongArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
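+    // A slice-writing sketch with a hypothetical path: writing a rank-2 array into the
+    // [5, *, *] slice of an existing rank-3 data set; -1 is assumed to mark the free
+    // indices, and plane is a hypothetical rank-2 MDLongArray.
+    //
+    //   writeMDArraySlice("/ds3d", plane, new long[] { 5, -1, -1 });
+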
+    @Override
+    public void writeMDArray(final String objectPath, final MDLongArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                                    data.longDimensions(), 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 8, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                                features, MDArray.toLong(dimensions), null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
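+    // A creation sketch with hypothetical names: a 1000 x 1000 data set stored in
+    // 100 x 100 chunks with deflate compression (HDF5IntStorageFeatures.INT_DEFLATE is
+    // assumed to be available):
+    //
+    //   createMDArray("/tiles", new long[] { 1000, 1000 },
+    //           new int[] { 100, 100 }, HDF5IntStorageFeatures.INT_DEFLATE);
+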
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDLongArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDLongArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDLongArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
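+    // A tile-writing sketch with hypothetical names: writing a 10 x 10 tile at element
+    // offset (30, 40); the data set is extended to offset + dimensions as needed.
+    //
+    //   final MDLongArray tile = new MDLongArray(new int[] { 10, 10 });
+    //   // ... fill tile ...
+    //   writeMDArrayBlockWithOffset("/grid", tile, new long[] { 30, 40 });
+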
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5MDDataBlock.java b/source/java/ch/systemsx/cisd/hdf5/HDF5MDDataBlock.java
new file mode 100644
index 0000000..f6d013e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5MDDataBlock.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+
+/**
+ * A class that is used for iterating over a data set block by block, using
+ * <em>natural data blocks</em>. The <em>natural block</em> of a chunked data set is a chunk; for
+ * a non-chunked data set it is the complete array.
+ * <p>
+ * The pattern for using this class is:
+ * 
+ * <pre>
+ * for (HDF5MDDataBlock<MDIntArray> block : reader.getIntMDNaturalBlocks(dsNameMD))
+ * {
+ *     MDIntArray naturalBlock = block.getData();
+ *     ... work on naturalBlock, use block.getIndex() or block.getOffset() where needed ...
+ * }
+ * </pre>
+ * 
+ * The iteration in the multi-dimensional case is in C-order, that is, the last index varies
+ * fastest.
+ * <p>
+ * <b>Note:</b> If the size of the data set is not an integer number of blocks, then the last block
+ * will be smaller than the natural block size.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5MDDataBlock<T extends MDAbstractArray<?>>
+{
+
+    private final T data;
+
+    private final long[] offset;
+
+    private final long[] index;
+
+    HDF5MDDataBlock(T block, long[] index, long[] offset)
+    {
+        this.data = block;
+        this.index = index;
+        this.offset = offset;
+    }
+
+    /**
+     * Returns the data block itself.
+     */
+    public T getData()
+    {
+        return data;
+    }
+
+    /**
+     * Returns the offset in the data set for the current iteration in each dimension.
+     */
+    public long[] getOffset()
+    {
+        return offset;
+    }
+
+    /**
+     * Returns the iteration index of this block, starting with <code>{ 0, ..., 0 }</code>.
+     */
+    public long[] getIndex()
+    {
+        return index;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5MDEnumBlock.java b/source/java/ch/systemsx/cisd/hdf5/HDF5MDEnumBlock.java
new file mode 100644
index 0000000..54ae9cc
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5MDEnumBlock.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * A class that is used for iterating over an <code>Enum</code> data set block by block, using
+ * <em>natural data blocks</em>. The <em>natural block</em> of a chunked data set is a chunk; for
+ * a non-chunked data set it is the complete array.
+ * <p>
+ * The pattern for using this class is:
+ * 
+ * <pre>
+ * for (HDF5MDEnumBlock block : reader.getEnumMDNaturalBlocks(dsNameMD))
+ * {
+ *     HDF5EnumerationValueMDArray naturalBlock = block.getData();
+ *     ... work on naturalBlock, use block.getIndex() or block.getOffset() where needed ...
+ * }
+ * </pre>
+ * 
+ * The iteration in the multi-dimensional case is in C-order, that is, the last index varies
+ * fastest.
+ * <p>
+ * <b>Note:</b> If the size of the data set is not an integer number of blocks, then the last block
+ * will be smaller than the natural block size.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5MDEnumBlock
+{
+    private final HDF5EnumerationValueMDArray data;
+
+    private final long[] offset;
+
+    private final long[] index;
+
+    HDF5MDEnumBlock(HDF5EnumerationValueMDArray block, long[] index, long[] offset)
+    {
+        this.data = block;
+        this.index = index;
+        this.offset = offset;
+    }
+
+    /**
+     * Returns the data block itself.
+     */
+    public HDF5EnumerationValueMDArray getData()
+    {
+        return data;
+    }
+
+    /**
+     * Returns the offset in the data set for the current iteration in each dimension.
+     */
+    public long[] getOffset()
+    {
+        return offset;
+    }
+
+    /**
+     * Returns the iteration index of this block, starting with <code>{ 0, ..., 0 }</code>.
+     */
+    public long[] getIndex()
+    {
+        return index;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5MemberByteifyer.java b/source/java/ch/systemsx/cisd/hdf5/HDF5MemberByteifyer.java
new file mode 100644
index 0000000..c2d2bbd
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5MemberByteifyer.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5T.H5Tinsert;
+
+import java.lang.reflect.Field;
+
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A class that byteifies member fields of objects, that is, converts them to and from the byte
+ * representation used for HDF5 compound data types.
+ * 
+ * @author Bernd Rinn
+ */
+abstract class HDF5MemberByteifyer
+{
+    private final Field fieldOrNull;
+
+    private final String memberName;
+
+    protected final int maxCharacters;
+
+    protected final int size;
+
+    protected final int offsetOnDisk;
+
+    protected final int offsetInMemory;
+
+    protected final CharacterEncoding encoding;
+
+    private final HDF5DataTypeVariant typeVariant;
+
+    private final boolean isVariableLengthType;
+
+    HDF5MemberByteifyer(Field fieldOrNull, String memberName, int size, int offset, int memOffset,
+            boolean isVariableLengthType, HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this(fieldOrNull, memberName, size, offset, memOffset, CharacterEncoding.ASCII, size,
+                isVariableLengthType, false, typeVariantOrNull);
+    }
+
+    HDF5MemberByteifyer(Field fieldOrNull, String memberName, int size, int offset, int memOffset,
+            CharacterEncoding encoding, int maxCharacters, boolean isVariableLengthType,
+            boolean isReferenceType)
+    {
+        this(fieldOrNull, memberName, size, offset, memOffset, encoding, maxCharacters,
+                isVariableLengthType, isReferenceType, HDF5DataTypeVariant.NONE);
+    }
+
+    private HDF5MemberByteifyer(Field fieldOrNull, String memberName, int size, int offset,
+            int memOffset, CharacterEncoding encoding, int maxCharacters,
+            boolean isVariableLengthType, boolean isReferenceType,
+            HDF5DataTypeVariant typeVariantOrNull)
+    {
+        this.isVariableLengthType = isVariableLengthType;
+        this.fieldOrNull = fieldOrNull;
+        this.memberName = memberName;
+        this.maxCharacters = maxCharacters;
+        if (isVariableLengthType)
+        {
+            this.size = HDFNativeData.getMachineWordSize();
+        } else if (isReferenceType)
+        {
+            this.size = HDF5BaseReader.REFERENCE_SIZE_IN_BYTES;
+        } else
+        {
+            this.size = size;
+        }
+        this.offsetOnDisk = offset;
+        this.offsetInMemory = PaddingUtils.padOffset(memOffset, getElementSize());
+        this.encoding = encoding;
+        this.typeVariant = HDF5DataTypeVariant.maskNull(typeVariantOrNull);
+    }
+
+    /**
+     * Returns the size of one element of this data type in bytes.
+     */
+    abstract int getElementSize();
+
+    abstract byte[] byteify(int compoundDataTypeId, Object obj) throws IllegalAccessException;
+
+    abstract void setFromByteArray(int compoundDataTypeId, Object obj, byte[] byteArr,
+            int arrayOffset) throws IllegalAccessException;
+
+    abstract int getMemberStorageTypeId();
+
+    /**
+     * Returns -1 if the native type id should be inferred from the storage type id.
+     */
+    abstract int getMemberNativeTypeId();
+
+    HDF5EnumerationType tryGetEnumType()
+    {
+        return null;
+    }
+
+    void insertType(int dataTypeId)
+    {
+        H5Tinsert(dataTypeId, memberName, offsetOnDisk, getMemberStorageTypeId());
+    }
+
+    void insertNativeType(int dataTypeId, HDF5 h5, ICleanUpRegistry registry)
+    {
+        if (getMemberNativeTypeId() < 0)
+        {
+            H5Tinsert(dataTypeId, memberName, offsetInMemory,
+                    h5.getNativeDataType(getMemberStorageTypeId(), registry));
+        } else
+        {
+            H5Tinsert(dataTypeId, memberName, offsetInMemory, getMemberNativeTypeId());
+        }
+    }
+
+    String getMemberName()
+    {
+        return memberName;
+    }
+
+    Field tryGetField()
+    {
+        return fieldOrNull;
+    }
+
+    int getMaxCharacters()
+    {
+        return maxCharacters;
+    }
+
+    int getSize()
+    {
+        return size;
+    }
+
+    int getOffsetOnDisk()
+    {
+        return offsetOnDisk;
+    }
+
+    int getTotalSizeOnDisk()
+    {
+        return offsetOnDisk + size;
+    }
+
+    int getOffsetInMemory()
+    {
+        return offsetInMemory;
+    }
+
+    int getTotalSizeInMemory()
+    {
+        return offsetInMemory + size;
+    }
+
+    HDF5DataTypeVariant getTypeVariant()
+    {
+        return typeVariant;
+    }
+
+    boolean isVariableLengthType()
+    {
+        return isVariableLengthType;
+    }
+
+    String describe()
+    {
+        if (fieldOrNull != null)
+        {
+            return "field '" + fieldOrNull.getName() + "' of class '"
+                    + fieldOrNull.getDeclaringClass().getCanonicalName() + "'";
+        } else
+        {
+            return "member '" + memberName + "'";
+        }
+    }
+
+    boolean isDummy()
+    {
+        return false;
+    }
+
+    boolean mayBeCut()
+    {
+        return false;
+    }
+
+    @Override
+    public String toString()
+    {
+        return describe();
+    }
+}
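For orientation: each concrete byteifyer pairs one reflected field with a fixed byte range that starts at offsetOnDisk. The toy sketch below is not part of the library; it ignores HDF5 type ids and uses java.nio instead of HDFNativeData, but it illustrates the byteify/setFromByteArray contract for a single int member.

    import java.lang.reflect.Field;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public class ToyIntByteifyer
    {
        // Maps one int field of an object to a fixed byte range of a record buffer,
        // mirroring what a concrete byteifyer does per compound member.
        private final Field field;

        private final int offset;

        public ToyIntByteifyer(Field field, int offset)
        {
            field.setAccessible(true);
            this.field = field;
            this.offset = offset;
        }

        public void byteify(Object obj, byte[] record) throws IllegalAccessException
        {
            ByteBuffer.wrap(record).order(ByteOrder.LITTLE_ENDIAN)
                    .putInt(offset, field.getInt(obj));
        }

        public void setFromByteArray(Object obj, byte[] record) throws IllegalAccessException
        {
            field.setInt(obj, ByteBuffer.wrap(record).order(ByteOrder.LITTLE_ENDIAN)
                    .getInt(offset));
        }

        public static void main(String[] args) throws Exception
        {
            class Record { int count = 42; }
            final byte[] buf = new byte[8];
            final ToyIntByteifyer b =
                    new ToyIntByteifyer(Record.class.getDeclaredField("count"), 4);
            b.byteify(new Record(), buf); // buf[4..7] now holds 42, little-endian
        }
    }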
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5NaturalBlock1DParameters.java b/source/java/ch/systemsx/cisd/hdf5/HDF5NaturalBlock1DParameters.java
new file mode 100644
index 0000000..3d6918e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5NaturalBlock1DParameters.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.NoSuchElementException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * A class for computing the parameters of one-dimensional natural blocks.
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5NaturalBlock1DParameters
+{
+    private final int naturalBlockSize;
+
+    private final long numberOfBlocks;
+
+    private final int lastBlockSize;
+
+    final class HDF5NaturalBlock1DIndex
+    {
+        private long index = 0;
+
+        private long offset;
+
+        private int blockSize;
+
+        boolean hasNext()
+        {
+            return index < numberOfBlocks;
+        }
+
+        long computeOffsetAndSizeGetOffset()
+        {
+            if (hasNext() == false)
+            {
+                throw new NoSuchElementException();
+            }
+            offset = naturalBlockSize * index;
+            blockSize = (index == numberOfBlocks - 1) ? lastBlockSize : naturalBlockSize;
+            return offset;
+        }
+
+        int getBlockSize()
+        {
+            return blockSize;
+        }
+
+        long getAndIncIndex()
+        {
+            return index++;
+        }
+    }
+
+    HDF5NaturalBlock1DParameters(final HDF5DataSetInformation info)
+    {
+        if (info.getRank() > 1)
+        {
+            throw new HDF5JavaException("Data Set is expected to be of rank 1 (rank="
+                    + info.getRank() + ")");
+        }
+        final long size = info.getDimensions()[0];
+        naturalBlockSize =
+                (info.getStorageLayout() == HDF5StorageLayout.CHUNKED) ? info.tryGetChunkSizes()[0]
+                        : (int) size;
+        final int sizeModNaturalBlockSize = (int) (size % naturalBlockSize);
+        numberOfBlocks = (size / naturalBlockSize) + (sizeModNaturalBlockSize != 0 ? 1 : 0);
+        lastBlockSize = (sizeModNaturalBlockSize != 0) ? sizeModNaturalBlockSize : naturalBlockSize;
+    }
+
+    HDF5NaturalBlock1DIndex getNaturalBlockIndex()
+    {
+        return new HDF5NaturalBlock1DIndex();
+    }
+
+}
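The block arithmetic above can be checked by hand: a data set of 10 elements with natural block size 4 yields 3 blocks of sizes 4, 4 and 2 at offsets 0, 4 and 8. A minimal standalone sketch of the same computation:

    public class NaturalBlock1DDemo
    {
        public static void main(String[] args)
        {
            final long size = 10;           // data set length
            final int naturalBlockSize = 4; // chunk size for CHUNKED layout
            final int sizeMod = (int) (size % naturalBlockSize);
            final long numberOfBlocks = (size / naturalBlockSize) + (sizeMod != 0 ? 1 : 0);
            final int lastBlockSize = (sizeMod != 0) ? sizeMod : naturalBlockSize;
            for (long index = 0; index < numberOfBlocks; ++index)
            {
                final long offset = naturalBlockSize * index;
                final int blockSize =
                        (index == numberOfBlocks - 1) ? lastBlockSize : naturalBlockSize;
                System.out.println("block " + index + ": offset=" + offset
                        + ", size=" + blockSize);
            }
        }
    }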
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5NaturalBlockMDParameters.java b/source/java/ch/systemsx/cisd/hdf5/HDF5NaturalBlockMDParameters.java
new file mode 100644
index 0000000..bb1f6ed
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5NaturalBlockMDParameters.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.NoSuchElementException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+
+/**
+ * A class for computing the parameters of multi-dimensional natural blocks.
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5NaturalBlockMDParameters
+{
+    private final int rank;
+
+    private final long[] numberOfBlocks;
+    
+    private final int[] naturalBlockSize;
+    
+    private final int[] lastBlockSize;
+
+    final class HDF5NaturalBlockMDIndex
+    {
+        private long[] index = new long[rank];
+
+        private long[] offset = new long[rank];
+
+        private int[] blockSize = naturalBlockSize.clone();
+
+        private boolean indexCalculated = true;
+
+        boolean hasNext()
+        {
+            if (indexCalculated)
+            {
+                return true;
+            }
+            for (int i = index.length - 1; i >= 0; --i)
+            {
+                ++index[i];
+                if (index[i] < numberOfBlocks[i])
+                {
+                    offset[i] += naturalBlockSize[i];
+                    if (index[i] == numberOfBlocks[i] - 1)
+                    {
+                        blockSize[i] = lastBlockSize[i];
+                    }
+                    indexCalculated = true;
+                    break;
+                } else
+                {
+                    index[i] = 0;
+                    offset[i] = 0;
+                    blockSize[i] = naturalBlockSize[i];
+                }
+            }
+            return indexCalculated;
+        }
+
+        long[] computeOffsetAndSizeGetOffsetClone()
+        {
+            if (hasNext() == false)
+            {
+                throw new NoSuchElementException();
+            }
+            indexCalculated = false;
+            return offset.clone();
+        }
+
+        int[] getBlockSize()
+        {
+            return blockSize;
+        }
+        
+        long[] getIndexClone()
+        {
+            return index.clone();
+        }
+    }
+
+    HDF5NaturalBlockMDParameters(final HDF5DataSetInformation info)
+    {
+        rank = info.getRank();
+        final long[] dimensions = info.getDimensions();
+        naturalBlockSize =
+                (info.getStorageLayout() == HDF5StorageLayout.CHUNKED) ? info.tryGetChunkSizes()
+                        : MDAbstractArray.toInt(dimensions);
+        numberOfBlocks = new long[rank];
+        lastBlockSize = new int[rank];
+        for (int i = 0; i < dimensions.length; ++i)
+        {
+            final int sizeModNaturalBlockSize = (int) (dimensions[i] % naturalBlockSize[i]);
+            numberOfBlocks[i] =
+                    (dimensions[i] / naturalBlockSize[i]) + (sizeModNaturalBlockSize != 0 ? 1 : 0);
+            lastBlockSize[i] =
+                    (sizeModNaturalBlockSize != 0) ? sizeModNaturalBlockSize : naturalBlockSize[i];
+        }
+    }
+
+    HDF5NaturalBlockMDIndex getNaturalBlockIndex()
+    {
+        return new HDF5NaturalBlockMDIndex();
+    }
+
+}
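hasNext() advances the block index like an odometer: the last dimension moves fastest, a dimension that reaches its block count wraps to zero and resets its offset and block size, and iteration ends once all dimensions wrap in the same step. The standalone sketch below enumerates the blocks of a 5x3 data set with 2x2 natural blocks (3 x 2 = 6 blocks) using the same arithmetic.

    public class NaturalBlockMDDemo
    {
        public static void main(String[] args)
        {
            final long[] dims = { 5, 3 };
            final int[] block = { 2, 2 };
            final long[] numberOfBlocks = new long[dims.length];
            final int[] lastBlockSize = new int[dims.length];
            for (int i = 0; i < dims.length; ++i)
            {
                final int mod = (int) (dims[i] % block[i]);
                numberOfBlocks[i] = dims[i] / block[i] + (mod != 0 ? 1 : 0);
                lastBlockSize[i] = (mod != 0) ? mod : block[i];
            }
            // Row-major enumeration of all block offsets and effective sizes.
            final long[] index = new long[dims.length];
            boolean more = true;
            while (more)
            {
                final StringBuilder sb = new StringBuilder();
                for (int i = 0; i < dims.length; ++i)
                {
                    final long offset = index[i] * block[i];
                    final int size = (index[i] == numberOfBlocks[i] - 1)
                            ? lastBlockSize[i] : block[i];
                    sb.append("dim").append(i).append(": offset=").append(offset)
                            .append(" size=").append(size).append("  ");
                }
                System.out.println(sb);
                more = false;
                for (int i = dims.length - 1; i >= 0; --i) // odometer increment
                {
                    if (++index[i] < numberOfBlocks[i])
                    {
                        more = true;
                        break;
                    }
                    index[i] = 0;
                }
            }
        }
    }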
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectInformation.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectInformation.java
new file mode 100644
index 0000000..c809dbd
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectInformation.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * Information about an object in an HDF5 file.
+ * 
+ * @author Bernd Rinn
+ */
+public final class HDF5ObjectInformation extends HDF5CommonInformation
+{
+
+    private final long fileNumber;
+
+    private final long address;
+
+    private final int referenceCount;
+
+    private final long creationTime;
+
+    private final long numberOfAttributes;
+
+    HDF5ObjectInformation(String path, HDF5ObjectType objectType, long[] info)
+    {
+        super(path, objectType);
+        this.fileNumber = info[0];
+        this.address = info[1];
+        this.referenceCount = (int) info[2];
+        this.creationTime = info[3];
+        this.numberOfAttributes = info[4];
+    }
+
+    /**
+     * Returns the file number that the object is in. Can be useful when external links are
+     * involved.
+     */
+    public long getFileNumber()
+    {
+        return fileNumber;
+    }
+
+    /**
+     * Returns the address of the object in the file. If the addresses of two links are the same,
+     * they point to the same object. Can be used to spot hard or soft links.
+     */
+    public long getAddress()
+    {
+        return address;
+    }
+
+    /**
+     * Returns the number of references that point to this object, i.e. the number of hard links
+     * that point to the object.
+     */
+    public int getReferenceCount()
+    {
+        return referenceCount;
+    }
+
+    /**
+     * Returns the time of creation of this object (as the number of seconds since the start of
+     * the epoch). Note that this only works for data sets; for groups, this will always return 0.
+     */
+    public long getCreationTime()
+    {
+        return creationTime;
+    }
+
+    /**
+     * Returns the number of attributes that this object has.
+     */
+    public long getNumberOfAttributes()
+    {
+        return numberOfAttributes;
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((path == null) ? 0 : path.hashCode());
+        result = prime * result + ((int) address);
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final HDF5ObjectInformation other = (HDF5ObjectInformation) obj;
+        if (path == null)
+        {
+            if (other.path != null)
+            {
+                return false;
+            }
+        } else if (path.equals(other.path) == false)
+        {
+            return false;
+        } 
+        if (other.address != address)
+        {
+            return false;
+        }
+        return true;
+    }
+
+}
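As the Javadoc of getAddress() notes, two links with the same address resolve to the same object. A short usage sketch; the file name and paths are hypothetical, and the HDF5Factory / reader.object() entry points of the public API are assumed:

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.HDF5ObjectInformation;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;

    public class SameObjectCheck
    {
        public static void main(String[] args)
        {
            // "data.h5", "/a" and "/b" are hypothetical; adjust to an existing file.
            final IHDF5Reader reader = HDF5Factory.openForReading("data.h5");
            try
            {
                final HDF5ObjectInformation a = reader.object().getObjectInformation("/a");
                final HDF5ObjectInformation b = reader.object().getObjectInformation("/b");
                // Equal addresses mean both links resolve to the same object in the file.
                System.out.println("same object: " + (a.getAddress() == b.getAddress()));
            } finally
            {
                reader.close();
            }
        }
    }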
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectReadOnlyInfoProviderHandler.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectReadOnlyInfoProviderHandler.java
new file mode 100644
index 0000000..cf86591
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectReadOnlyInfoProviderHandler.java
@@ -0,0 +1,519 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.removeInternalNames;
+
+import java.util.List;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Implementation of {@link IHDF5ObjectReadOnlyInfoProviderHandler}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5ObjectReadOnlyInfoProviderHandler implements IHDF5ObjectReadOnlyInfoProviderHandler
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5ObjectReadOnlyInfoProviderHandler(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // /////////////////////
+    // Objects & Links
+    // /////////////////////
+
+    @Override
+    public HDF5LinkInformation getLinkInformation(final String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.h5.getLinkInfo(baseReader.fileId, objectPath, false);
+    }
+
+    @Override
+    public HDF5ObjectInformation getObjectInformation(final String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.h5.getObjectInfo(baseReader.fileId, objectPath, false);
+    }
+
+    @Override
+    public HDF5ObjectType getObjectType(final String objectPath, boolean followLink)
+    {
+        baseReader.checkOpen();
+        if (followLink)
+        {
+            return baseReader.h5.getObjectTypeInfo(baseReader.fileId, objectPath, false);
+        } else
+        {
+            return baseReader.h5.getLinkTypeInfo(baseReader.fileId, objectPath, false);
+        }
+    }
+
+    @Override
+    public HDF5ObjectType getObjectType(final String objectPath)
+    {
+        return getObjectType(objectPath, true);
+    }
+
+    @Override
+    public boolean exists(final String objectPath, boolean followLink)
+    {
+        if (followLink == false)
+        {
+            // Optimization
+            baseReader.checkOpen();
+            if ("/".equals(objectPath))
+            {
+                return true;
+            }
+            return baseReader.h5.exists(baseReader.fileId, objectPath);
+        } else
+        {
+            return exists(objectPath);
+        }
+    }
+
+    @Override
+    public boolean exists(final String objectPath)
+    {
+        baseReader.checkOpen();
+        if ("/".equals(objectPath))
+        {
+            return true;
+        }
+        return baseReader.h5.getObjectTypeId(baseReader.fileId, objectPath, false) >= 0;
+    }
+
+    @Override
+    public String toHouseKeepingPath(String objectPath)
+    {
+        return HDF5Utils.toHouseKeepingPath(objectPath, baseReader.houseKeepingNameSuffix);
+    }
+
+    @Override
+    public boolean isHouseKeepingObject(String objectPath)
+    {
+        return HDF5Utils.isInternalName(objectPath, baseReader.houseKeepingNameSuffix);
+    }
+
+    @Override
+    public boolean isGroup(final String objectPath, boolean followLink)
+    {
+        return HDF5ObjectType.isGroup(getObjectType(objectPath, followLink));
+    }
+
+    @Override
+    public boolean isGroup(final String objectPath)
+    {
+        return HDF5ObjectType.isGroup(getObjectType(objectPath));
+    }
+
+    @Override
+    public boolean isDataSet(final String objectPath, boolean followLink)
+    {
+        return HDF5ObjectType.isDataSet(getObjectType(objectPath, followLink));
+    }
+
+    @Override
+    public boolean isDataSet(final String objectPath)
+    {
+        return HDF5ObjectType.isDataSet(getObjectType(objectPath));
+    }
+
+    @Override
+    public boolean isDataType(final String objectPath, boolean followLink)
+    {
+        return HDF5ObjectType.isDataType(getObjectType(objectPath, followLink));
+    }
+
+    @Override
+    public boolean isDataType(final String objectPath)
+    {
+        return HDF5ObjectType.isDataType(getObjectType(objectPath));
+    }
+
+    @Override
+    public boolean isSoftLink(final String objectPath)
+    {
+        return HDF5ObjectType.isSoftLink(getObjectType(objectPath, false));
+    }
+
+    @Override
+    public boolean isExternalLink(final String objectPath)
+    {
+        return HDF5ObjectType.isExternalLink(getObjectType(objectPath, false));
+    }
+
+    @Override
+    public boolean isSymbolicLink(final String objectPath)
+    {
+        return HDF5ObjectType.isSymbolicLink(getObjectType(objectPath, false));
+    }
+
+    @Override
+    public String tryGetSymbolicLinkTarget(final String objectPath)
+    {
+        return getLinkInformation(objectPath).tryGetSymbolicLinkTarget();
+    }
+
+    @Override
+    public String tryGetDataTypePath(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> dataTypeNameCallable =
+                new ICallableWithCleanUp<String>()
+                    {
+                        @Override
+                        public String call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final int dataTypeId =
+                                    baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                            return baseReader.tryGetDataTypePath(dataTypeId);
+                        }
+                    };
+        return baseReader.runner.call(dataTypeNameCallable);
+    }
+
+    @Override
+    public String tryGetDataTypePath(HDF5DataType type)
+    {
+        assert type != null;
+
+        baseReader.checkOpen();
+        type.check(baseReader.fileId);
+        return baseReader.tryGetDataTypePath(type.getStorageTypeId());
+    }
+
+    @Override
+    public List<String> getAttributeNames(final String objectPath)
+    {
+        assert objectPath != null;
+        baseReader.checkOpen();
+        return removeInternalNames(getAllAttributeNames(objectPath),
+                baseReader.houseKeepingNameSuffix, "/".equals(objectPath));
+    }
+
+    @Override
+    public List<String> getAllAttributeNames(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<List<String>> attributeNameReaderRunnable =
+                new ICallableWithCleanUp<List<String>>()
+                    {
+                        @Override
+                        public List<String> call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return baseReader.h5.getAttributeNames(objectId, registry);
+                        }
+                    };
+        return baseReader.runner.call(attributeNameReaderRunnable);
+    }
+
+    @Override
+    public HDF5DataTypeInformation getAttributeInformation(final String dataSetPath,
+            final String attributeName)
+    {
+        return getAttributeInformation(dataSetPath, attributeName, DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public HDF5DataTypeInformation getAttributeInformation(final String dataSetPath,
+            final String attributeName, final DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        assert dataSetPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5DataTypeInformation> informationDeterminationRunnable =
+                new ICallableWithCleanUp<HDF5DataTypeInformation>()
+                    {
+                        @Override
+                        public HDF5DataTypeInformation call(ICleanUpRegistry registry)
+                        {
+                            try
+                            {
+                                final int objectId =
+                                        baseReader.h5.openObject(baseReader.fileId, dataSetPath,
+                                                registry);
+                                final int attributeId =
+                                        baseReader.h5.openAttribute(objectId, attributeName,
+                                                registry);
+                                final int dataTypeId =
+                                        baseReader.h5
+                                                .getDataTypeForAttribute(attributeId, registry);
+                                final HDF5DataTypeInformation dataTypeInformation =
+                                        baseReader.getDataTypeInformation(dataTypeId,
+                                                dataTypeInfoOptions, registry);
+                                if (dataTypeInformation.isArrayType() == false)
+                                {
+                                    final int[] dimensions =
+                                            MDAbstractArray.toInt(baseReader.h5
+                                                    .getDataDimensionsForAttribute(attributeId,
+                                                            registry));
+                                    if (dimensions.length > 0)
+                                    {
+                                        dataTypeInformation.setDimensions(dimensions);
+                                    }
+                                }
+                                return dataTypeInformation;
+                            } catch (RuntimeException ex)
+                            {
+                                throw ex;
+                            }
+                        }
+                    };
+        return baseReader.runner.call(informationDeterminationRunnable);
+    }
+
+    @Override
+    public HDF5DataSetInformation getDataSetInformation(final String dataSetPath)
+    {
+        return getDataSetInformation(dataSetPath, DataTypeInfoOptions.DEFAULT);
+    }
+
+    @Override
+    public HDF5DataSetInformation getDataSetInformation(final String dataSetPath,
+            final DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return getDataSetInformation(dataSetPath, dataTypeInfoOptions, true);
+    }
+
+    HDF5DataSetInformation getDataSetInformation(final String dataSetPath,
+            final DataTypeInfoOptions dataTypeInfoOptions, final boolean fillInDimensions)
+    {
+        assert dataSetPath != null;
+
+        baseReader.checkOpen();
+        return baseReader.getDataSetInformation(dataSetPath, dataTypeInfoOptions, fillInDimensions);
+    }
+
+    @Override
+    public long getSize(final String objectPath)
+    {
+        return getDataSetInformation(objectPath, DataTypeInfoOptions.MINIMAL).getSize();
+    }
+
+    @Override
+    public long getNumberOfElements(final String objectPath)
+    {
+        return getDataSetInformation(objectPath, DataTypeInfoOptions.MINIMAL).getNumberOfElements();
+    }
+
+    @Override
+    public int getElementSize(final String objectPath)
+    {
+        return getDataSetInformation(objectPath, DataTypeInfoOptions.MINIMAL, false)
+                .getTypeInformation().getElementSize();
+    }
+
+    @Override
+    public int getSpaceRank(String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.getSpaceRank(objectPath);
+    }
+
+    @Override
+    public long[] getSpaceDimensions(String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.getSpaceDimensions(objectPath);
+    }
+
+    @Override
+    public int getArrayRank(String objectPath)
+    {
+        final HDF5DataSetInformation info =
+                getDataSetInformation(objectPath, DataTypeInfoOptions.MINIMAL, false);
+        return info.getTypeInformation().getRank();
+    }
+
+    @Override
+    public int[] getArrayDimensions(String objectPath)
+    {
+        final HDF5DataSetInformation info =
+                getDataSetInformation(objectPath, DataTypeInfoOptions.MINIMAL, false);
+        return info.getTypeInformation().getDimensions();
+    }
+
+    @Override
+    public int getRank(String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.getRank(objectPath);
+    }
+
+    @Override
+    public long[] getDimensions(String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.getDimensions(objectPath);
+    }
+
+    // /////////////////////
+    // Copies
+    // /////////////////////
+
+    @Override
+    public void copy(final String sourceObject, final IHDF5Writer destinationWriter,
+            final String destinationObject)
+    {
+        baseReader.checkOpen();
+        final HDF5Writer dwriter = (HDF5Writer) destinationWriter;
+        if (dwriter.object() != this)
+        {
+            dwriter.checkOpen();
+        }
+        baseReader.copyObject(sourceObject, dwriter.getFileId(), destinationObject);
+    }
+
+    @Override
+    public void copy(String sourceObject, IHDF5Writer destinationWriter)
+    {
+        copy(sourceObject, destinationWriter, "/");
+    }
+
+    @Override
+    public void copyAll(IHDF5Writer destinationWriter)
+    {
+        copy("/", destinationWriter, "/");
+    }
+
+    // /////////////////////
+    // Group
+    // /////////////////////
+
+    @Override
+    public List<String> getGroupMembers(final String groupPath)
+    {
+        assert groupPath != null;
+
+        baseReader.checkOpen();
+        return baseReader.getGroupMembers(groupPath);
+    }
+
+    @Override
+    public List<String> getAllGroupMembers(final String groupPath)
+    {
+        assert groupPath != null;
+
+        baseReader.checkOpen();
+        return baseReader.getAllGroupMembers(groupPath);
+    }
+
+    @Override
+    public List<String> getGroupMemberPaths(final String groupPath)
+    {
+        assert groupPath != null;
+
+        baseReader.checkOpen();
+        return baseReader.getGroupMemberPaths(groupPath);
+    }
+
+    @Override
+    public List<HDF5LinkInformation> getGroupMemberInformation(final String groupPath,
+            boolean readLinkTargets)
+    {
+        baseReader.checkOpen();
+        if (readLinkTargets)
+        {
+            return baseReader.h5.getGroupMemberLinkInfo(baseReader.fileId, groupPath, false,
+                    baseReader.houseKeepingNameSuffix);
+        } else
+        {
+            return baseReader.h5.getGroupMemberTypeInfo(baseReader.fileId, groupPath, false,
+                    baseReader.houseKeepingNameSuffix);
+        }
+    }
+
+    @Override
+    public List<HDF5LinkInformation> getAllGroupMemberInformation(final String groupPath,
+            boolean readLinkTargets)
+    {
+        baseReader.checkOpen();
+        if (readLinkTargets)
+        {
+            return baseReader.h5.getGroupMemberLinkInfo(baseReader.fileId, groupPath, true,
+                    baseReader.houseKeepingNameSuffix);
+        } else
+        {
+            return baseReader.h5.getGroupMemberTypeInfo(baseReader.fileId, groupPath, true,
+                    baseReader.houseKeepingNameSuffix);
+        }
+    }
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    @Override
+    public HDF5DataTypeVariant tryGetTypeVariant(final String objectPath)
+    {
+        baseReader.checkOpen();
+        return baseReader.tryGetTypeVariant(objectPath);
+    }
+
+    @Override
+    public HDF5DataTypeVariant tryGetTypeVariant(String objectPath, String attributeName)
+    {
+        baseReader.checkOpen();
+        return baseReader.tryGetTypeVariant(objectPath, attributeName);
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public boolean hasAttribute(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Boolean> writeRunnable = new ICallableWithCleanUp<Boolean>()
+            {
+                @Override
+                public Boolean call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    return baseReader.h5.existsAttribute(objectId, attributeName);
+                }
+            };
+        return baseReader.runner.call(writeRunnable);
+    }
+
+}
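A usage sketch of this handler through the public reader front-end, recursively listing a file's hierarchy; the file name is hypothetical and the reader.object() wiring is assumed:

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;

    public class ListHierarchyDemo
    {
        public static void main(String[] args)
        {
            // "data.h5" is a hypothetical file name.
            final IHDF5Reader reader = HDF5Factory.openForReading("data.h5");
            try
            {
                print(reader, "/");
            } finally
            {
                reader.close();
            }
        }

        // Prints each object path; descends into groups only.
        private static void print(IHDF5Reader reader, String path)
        {
            System.out.println(path);
            if (reader.object().isGroup(path))
            {
                for (String memberPath : reader.object().getGroupMemberPaths(path))
                {
                    print(reader, memberPath);
                }
            }
        }
    }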
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectReadWriteInfoProviderHandler.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectReadWriteInfoProviderHandler.java
new file mode 100644
index 0000000..2aeb6af
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectReadWriteInfoProviderHandler.java
@@ -0,0 +1,334 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createAttributeTypeVariantAttributeName;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createObjectTypeVariantAttributeName;
+
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Implementation of {@link IHDF5ObjectReadWriteInfoProviderHandler}.
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5ObjectReadWriteInfoProviderHandler extends HDF5ObjectReadOnlyInfoProviderHandler
+        implements IHDF5ObjectReadWriteInfoProviderHandler
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5ObjectReadWriteInfoProviderHandler(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+
+        assert baseWriter != null;
+        this.baseWriter = baseWriter;
+    }
+
+    @Override
+    public void createHardLink(String currentPath, String newPath)
+    {
+        assert currentPath != null;
+        assert newPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.h5.createHardLink(baseWriter.fileId, currentPath, newPath);
+    }
+
+    @Override
+    public void createSoftLink(String targetPath, String linkPath)
+    {
+        assert targetPath != null;
+        assert linkPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.h5.createSoftLink(baseWriter.fileId, linkPath, targetPath);
+    }
+
+    @Override
+    public void createOrUpdateSoftLink(String targetPath, String linkPath)
+    {
+        assert targetPath != null;
+        assert linkPath != null;
+
+        baseWriter.checkOpen();
+        if (isSymbolicLink(linkPath))
+        {
+            delete(linkPath);
+        }
+        baseWriter.h5.createSoftLink(baseWriter.fileId, linkPath, targetPath);
+    }
+
+    @Override
+    public void createExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException
+    {
+        assert targetFileName != null;
+        assert targetPath != null;
+        assert linkPath != null;
+
+        baseWriter.checkOpen();
+        if (baseWriter.fileFormat.isHDF5_1_8_OK() == false)
+        {
+            throw new IllegalStateException(
+                    "External links are not allowed in strict HDF5 1.6.x compatibility mode.");
+        }
+        baseWriter.h5.createExternalLink(baseWriter.fileId, linkPath, targetFileName, targetPath);
+    }
+
+    @Override
+    public void createOrUpdateExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException
+    {
+        assert targetFileName != null;
+        assert targetPath != null;
+        assert linkPath != null;
+
+        baseWriter.checkOpen();
+        if (baseWriter.fileFormat.isHDF5_1_8_OK() == false)
+        {
+            throw new IllegalStateException(
+                    "External links are not allowed in strict HDF5 1.6.x compatibility mode.");
+        }
+        if (isSymbolicLink(linkPath))
+        {
+            delete(linkPath);
+        }
+        baseWriter.h5.createExternalLink(baseWriter.fileId, linkPath, targetFileName, targetPath);
+    }
+
+    @Override
+    public void delete(String objectPath)
+    {
+        baseWriter.checkOpen();
+        if (isGroup(objectPath, false))
+        {
+            for (String path : getGroupMemberPaths(objectPath))
+            {
+                delete(path);
+            }
+        }
+        baseWriter.h5.deleteObject(baseWriter.fileId, objectPath);
+    }
+
+    @Override
+    public void move(String oldLinkPath, String newLinkPath)
+    {
+        baseWriter.checkOpen();
+        baseWriter.h5.moveLink(baseWriter.fileId, oldLinkPath, newLinkPath);
+    }
+
+    // /////////////////////
+    // Group
+    // /////////////////////
+
+    @Override
+    public void createGroup(final String groupPath)
+    {
+        baseWriter.checkOpen();
+        baseWriter.h5.createGroup(baseWriter.fileId, groupPath);
+    }
+
+    @Override
+    public void createGroup(final String groupPath, final int sizeHint)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createGroupRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.h5.createOldStyleGroup(baseWriter.fileId, groupPath, sizeHint,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createGroupRunnable);
+    }
+
+    @Override
+    public void createGroup(final String groupPath, final int maxCompact, final int minDense)
+    {
+        baseWriter.checkOpen();
+        if (baseWriter.fileFormat.isHDF5_1_8_OK() == false)
+        {
+            throw new IllegalStateException(
+                    "New style groups are not allowed in strict HDF5 1.6.x compatibility mode.");
+        }
+        final ICallableWithCleanUp<Void> createGroupRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.h5.createNewStyleGroup(baseWriter.fileId, groupPath, maxCompact,
+                            minDense, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createGroupRunnable);
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void deleteAttribute(final String objectPath, final String name)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> deleteAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseWriter.h5.openObject(baseWriter.fileId, objectPath, registry);
+                    baseWriter.h5.deleteAttribute(objectId, name);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(deleteAttributeRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void setDataSetSize(final String objectPath, final long newSize)
+    {
+        setDataSetDimensions(objectPath, new long[]
+            { newSize });
+    }
+
+    @Override
+    public void setDataSetDimensions(final String objectPath, final long[] newDimensions)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.setDataSetDimensions(objectPath, newDimensions, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    @Override
+    public void setTypeVariant(final String objectPath, final HDF5DataTypeVariant typeVariant)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+                {
+                    @Override
+                    public Void call(ICleanUpRegistry registry)
+                    {
+                        if (baseWriter.useSimpleDataSpaceForAttributes)
+                        {
+                            final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                                { 1 }, registry);
+                            baseWriter.setAttribute(
+                                    objectPath,
+                                    createObjectTypeVariantAttributeName(baseWriter.houseKeepingNameSuffix),
+                                    baseWriter.typeVariantDataType.getStorageTypeId(),
+                                    baseWriter.typeVariantDataType.getNativeTypeId(),
+                                    dataSpaceId,
+                                    baseWriter.typeVariantDataType.getEnumType().toStorageForm(
+                                            typeVariant.ordinal()), registry);
+                        } else
+                        {
+                            baseWriter.setAttribute(
+                                    objectPath,
+                                    createObjectTypeVariantAttributeName(baseWriter.houseKeepingNameSuffix),
+                                    baseWriter.typeVariantDataType.getStorageTypeId(),
+                                    baseWriter.typeVariantDataType.getNativeTypeId(),
+                                    -1,
+                                    baseWriter.typeVariantDataType.getEnumType().toStorageForm(
+                                            typeVariant.ordinal()), registry);
+                        }
+                        return null; // Nothing to return.
+                    }
+                };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setTypeVariant(final String objectPath, final String attributeName,
+            final HDF5DataTypeVariant typeVariant)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(
+                                        objectPath,
+                                        createAttributeTypeVariantAttributeName(attributeName,
+                                                baseWriter.houseKeepingNameSuffix),
+                                        baseWriter.typeVariantDataType.getStorageTypeId(),
+                                        baseWriter.typeVariantDataType.getNativeTypeId(),
+                                        dataSpaceId, baseWriter.typeVariantDataType.getEnumType()
+                                                .toStorageForm(typeVariant.ordinal()), registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(
+                                        objectPath,
+                                        createAttributeTypeVariantAttributeName(attributeName,
+                                                baseWriter.houseKeepingNameSuffix),
+                                        baseWriter.typeVariantDataType.getStorageTypeId(),
+                                        baseWriter.typeVariantDataType.getNativeTypeId(),
+                                        -1,
+                                        baseWriter.typeVariantDataType.getEnumType().toStorageForm(
+                                                typeVariant.ordinal()), registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void deleteTypeVariant(String objectPath)
+    {
+        deleteAttribute(objectPath,
+                createObjectTypeVariantAttributeName(baseWriter.houseKeepingNameSuffix));
+    }
+
+    @Override
+    public void deleteTypeVariant(String objectPath, String attributeName)
+    {
+        deleteAttribute(
+                objectPath,
+                createAttributeTypeVariantAttributeName(attributeName,
+                        baseWriter.houseKeepingNameSuffix));
+    }
+}
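A usage sketch for the link-management part of this handler; the file name and paths are hypothetical, and the HDF5Factory / writer.object() entry points are assumed:

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class LinkDemo
    {
        public static void main(String[] args)
        {
            // "links.h5" is a hypothetical file name.
            final IHDF5Writer writer = HDF5Factory.open("links.h5");
            try
            {
                writer.object().createGroup("/group");
                // A soft link "/alias" pointing at "/group"; note the (target, link) order.
                writer.object().createOrUpdateSoftLink("/group", "/alias");
            } finally
            {
                writer.close();
            }
        }
    }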
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectType.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectType.java
new file mode 100644
index 0000000..9350956
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ObjectType.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An enumeration that represents the basic HDF5 object types.
+ * 
+ * @author Bernd Rinn
+ */
+public enum HDF5ObjectType
+{
+    DATASET, DATATYPE, GROUP, SOFT_LINK, EXTERNAL_LINK, OTHER, NONEXISTENT;
+
+    /**
+     * Returns <code>false</code> if <var>objectType</var> is equal to
+     * {@link #NONEXISTENT} and <code>true</code> otherwise.
+     * 
+     * @param objectType The object type to check.
+     */
+    public static boolean exists(HDF5ObjectType objectType)
+    {
+        return (objectType != NONEXISTENT);
+    }
+
+    /**
+     * Returns <code>true</code> if <var>objectType</var> is equal to {@link #GROUP} and
+     * <code>false</code> otherwise.
+     * 
+     * @param objectType The object type to check.
+     */
+    public static boolean isGroup(HDF5ObjectType objectType)
+    {
+        return (objectType == GROUP);
+    }
+
+    /**
+     * Returns <code>true</code> if <var>objectType</var> is equal to {@link #DATASET}
+     * and <code>false</code> otherwise.
+     * 
+     * @param objectType The object type to check.
+     */
+    public static boolean isDataSet(HDF5ObjectType objectType)
+    {
+        return (objectType == DATASET);
+    }
+
+    /**
+     * Returns <code>true</code> if <var>objectType</var> is equal to {@link #DATATYPE}
+     * and <code>false</code> otherwise.
+     * 
+     * @param objectType The object type to check.
+     */
+    public static boolean isDataType(HDF5ObjectType objectType)
+    {
+        return (objectType == DATATYPE);
+    }
+
+    /**
+     * Returns <code>true</code> if <var>objectType</var> is equal to {@link #SOFT_LINK}
+     * and <code>false</code> otherwise.
+     * 
+     * @param objectType The object type to check.
+     */
+    public static boolean isSoftLink(HDF5ObjectType objectType)
+    {
+        return (objectType == SOFT_LINK);
+    }
+
+    /**
+     * Returns <code>true</code> if <var>objectType</var> is equal to
+     * {@link #EXTERNAL_LINK} and <code>false</code> otherwise.
+     * 
+     * @param objectType The object type to check (can be <code>null</code>).
+     */
+    public static boolean isExternalLink(HDF5ObjectType objectType)
+    {
+        return (objectType == EXTERNAL_LINK);
+    }
+
+    /**
+     * Returns <code>true</code> if <var>objectType</var> is equal to either
+     * {@link #SOFT_LINK} or {@link #EXTERNAL_LINK}, and <code>false</code> otherwise.
+     * 
+     * @param objectType The object type to check.
+     */
+    public static boolean isSymbolicLink(HDF5ObjectType objectType)
+    {
+        return (objectType == SOFT_LINK) || (objectType == EXTERNAL_LINK);
+    }
+}
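A small client-side sketch showing how the enum and its helpers are typically consumed (the describe() helper is hypothetical):

    import ch.systemsx.cisd.hdf5.HDF5ObjectType;

    public class ObjectTypeDemo
    {
        // Classifies an object type the way client code typically would.
        static String describe(HDF5ObjectType type)
        {
            if (HDF5ObjectType.exists(type) == false)
            {
                return "does not exist";
            }
            switch (type)
            {
                case GROUP:
                    return "group";
                case DATASET:
                    return "data set";
                case DATATYPE:
                    return "committed data type";
                case SOFT_LINK:
                case EXTERNAL_LINK:
                    return "symbolic link";
                default:
                    return "other";
            }
        }

        public static void main(String[] args)
        {
            System.out.println(describe(HDF5ObjectType.GROUP));
        }
    }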
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueReader.java
new file mode 100644
index 0000000..03cb2a7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueReader.java
@@ -0,0 +1,309 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STRING;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Implementation of {@link IHDF5OpaqueReader}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5OpaqueReader implements IHDF5OpaqueReader
+{
+
+    private final HDF5BaseReader baseReader;
+
+    HDF5OpaqueReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    @Override
+    public String tryGetOpaqueTag(final String objectPath)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readTagCallable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                    return baseReader.h5.tryGetOpaqueTag(dataTypeId);
+                }
+            };
+        return baseReader.runner.call(readTagCallable);
+    }
+
+    @Override
+    public HDF5OpaqueType tryGetOpaqueType(final String objectPath)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5OpaqueType> readTagCallable =
+                new ICallableWithCleanUp<HDF5OpaqueType>()
+                    {
+                        @Override
+                        public HDF5OpaqueType call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final int dataTypeId =
+                                    baseReader.h5.getDataTypeForDataSet(dataSetId,
+                                            baseReader.fileRegistry);
+                            final String opaqueTagOrNull =
+                                    baseReader.h5.tryGetOpaqueTag(dataTypeId);
+                            if (opaqueTagOrNull == null)
+                            {
+                                return null;
+                            } else
+                            {
+                                return new HDF5OpaqueType(baseReader.fileId, dataTypeId,
+                                        opaqueTagOrNull, baseReader);
+                            }
+                        }
+                    };
+        return baseReader.runner.call(readTagCallable);
+    }
+
+    @Override
+    public byte[] readArray(final String objectPath)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    final boolean isString =
+                            (baseReader.h5.getClassType(nativeDataTypeId) == H5T_STRING);
+                    byte[] data;
+                    if (isString)
+                    {
+                        if (baseReader.h5.isVariableLengthString(nativeDataTypeId))
+                        {
+                            String[] value = new String[1];
+                            baseReader.h5.readDataSetVL(dataSetId, nativeDataTypeId, value);
+                            try
+                            {
+                                data = value[0].getBytes(CharacterEncoding.ASCII.getCharSetName());
+                            } catch (UnsupportedEncodingException ex)
+                            {
+                                data = value[0].getBytes();
+                            }
+                        } else
+                        {
+                            final int size = baseReader.h5.getDataTypeSize(nativeDataTypeId);
+                            data = new byte[size];
+                            baseReader.h5.readDataSetNonNumeric(dataSetId, nativeDataTypeId, data);
+                        }
+                    } else
+                    {
+                        final int elementSize = baseReader.h5.getDataTypeSize(nativeDataTypeId);
+                        data = new byte[spaceParams.blockSize * elementSize];
+                        baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    }
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
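+
+    // Note: readArray() returns the raw bytes of the data set; for string data sets it
+    // falls back to the encoded string bytes as handled above. A minimal, hypothetical
+    // usage sketch via the public front-end (assuming a reader.opaque() accessor):
+    //
+    //     final IHDF5Reader reader = HDF5Factory.openForReading("data.h5");
+    //     final byte[] raw = reader.opaque().readArray("/some/dataset");
+    //     reader.close();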
+
+    @Override
+    public byte[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    return baseReader.getAttributeAsByteArray(objectId, attributeName, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, blockNumber * blockSize,
+                                    blockSize, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    checkNotAString(objectPath, nativeDataTypeId);
+                    final int elementSize = baseReader.h5.getDataTypeSize(nativeDataTypeId);
+                    final byte[] data = new byte[elementSize * spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    checkNotAString(objectPath, nativeDataTypeId);
+                    final int elementSize = baseReader.h5.getDataTypeSize(nativeDataTypeId);
+                    final byte[] data = new byte[elementSize * spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int readArrayToBlockWithOffset(final String objectPath, final byte[] buffer,
+            final int blockSize, final long offset, final int memoryOffset)
+            throws HDF5JavaException
+    {
+        if (blockSize + memoryOffset > buffer.length)
+        {
+            throw new HDF5JavaException("Buffer not large enough for blockSize and memoryOffset");
+        }
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Integer> readCallable = new ICallableWithCleanUp<Integer>()
+            {
+                @Override
+                public Integer call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, memoryOffset, offset,
+                                    blockSize, registry);
+                    final int nativeDataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    checkNotAString(objectPath, nativeDataTypeId);
+                    final int elementSize = baseReader.h5.getDataTypeSize(nativeDataTypeId);
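+                    // Re-check with the actual element size: the early check above could not
+                    // yet account for elements larger than one byte.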
+                    if ((blockSize + memoryOffset) * elementSize > buffer.length)
+                    {
+                        throw new HDF5JavaException(
+                                "Buffer not large enough for blockSize and memoryOffset");
+                    }
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, buffer);
+                    return spaceParams.blockSize;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
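+    /**
+     * A minimal usage sketch for natural-block iteration over an opaque data set at a
+     * hypothetical path; each block is delivered together with its index and offset:
+     * 
+     * <pre>
+     * for (HDF5DataBlock&lt;byte[]&gt; block : reader.opaque().getArrayNaturalBlocks("/ds"))
+     * {
+     *     process(block.getIndex(), block.getOffset(), block.getData()); // process() is hypothetical
+     * }
+     * </pre>
+     */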
+    @Override
+    public Iterable<HDF5DataBlock<byte[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<byte[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<byte[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<byte[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<byte[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final byte[] block =
+                                        readArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5DataBlock<byte[]>(block, index.getAndIncIndex(),
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
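+    /**
+     * The opaque read methods are not meant for String data sets; fail early with a clear
+     * error message if the data set's native type turns out to be a String type.
+     */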
+    private void checkNotAString(final String objectPath, final int nativeDataTypeId)
+    {
+        final boolean isString =
+                (baseReader.h5.getClassType(nativeDataTypeId) == H5T_STRING);
+        if (isString)
+        {
+            throw new HDF5JavaException(objectPath + " cannot be a String.");
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueType.java b/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueType.java
new file mode 100644
index 0000000..13f6244
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueType.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+/**
+ * A class that represents an opaque data type for a given HDF5 file and tag.
+ *
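+ * A minimal usage sketch (hypothetical path and tag), obtaining such a type from a writer and
+ * reading back its tag:
+ * 
+ * <pre>
+ * HDF5OpaqueType type = writer.opaque().createArray("/ds", "FILETYPE", 100);
+ * String tag = type.getTag(); // "FILETYPE"
+ * </pre>
+ * 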
+ * @author Bernd Rinn
+ */
+public final class HDF5OpaqueType extends HDF5DataType
+{
+
+    private final String tag;
+    
+    HDF5OpaqueType(int fileId, int typeId, String tag, HDF5BaseReader baseReader)
+    {
+        super(fileId, typeId, typeId, baseReader);
+
+        assert tag != null;
+        
+        this.tag = tag;
+    }
+
+    /**
+     * Returns the tag of this opaque type.
+     */
+    public String getTag()
+    {
+        return tag;
+    }
+
+    @Override
+    public String tryGetName()
+    {
+        return tag;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueWriter.java
new file mode 100644
index 0000000..abc3ae8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5OpaqueWriter.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.OPAQUE_PREFIX;
+import static ch.systemsx.cisd.hdf5.HDF5Utils.createDataTypePath;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Implementation of {@link IHDF5OpaqueWriter}.
+ * 
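+ * Opaque data sets store raw bytes together with a tag that identifies their format. A minimal
+ * usage sketch (hypothetical path and tag):
+ * 
+ * <pre>
+ * writer.opaque().writeArray("/raw/blob", "MY_TAG", new byte[] { 1, 2, 3 });
+ * byte[] back = reader.opaque().readArray("/raw/blob");
+ * </pre>
+ * 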
+ * @author Bernd Rinn
+ */
+public class HDF5OpaqueWriter extends HDF5OpaqueReader implements IHDF5OpaqueWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5OpaqueWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String tag, final byte[] data)
+    {
+        writeArray(objectPath, tag, data,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String tag, final byte[] data,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert tag != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataTypeId = getOrCreateOpaqueTypeId(tag);
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, dataTypeId, new long[]
+                                { data.length }, 1, features, registry);
+                    H5Dwrite(dataSetId, dataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public HDF5OpaqueType createArray(String objectPath, String tag, int size)
+    {
+        return createArray(objectPath, tag, size,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final long size, final int blockSize)
+    {
+        return createArray(objectPath, tag, size, blockSize,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final int size, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert tag != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final int dataTypeId = getOrCreateOpaqueTypeId(tag);
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
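+                    // When chunking is required, create an empty, extendable data set with
+                    // chunk size 'size'; otherwise allocate a fixed-size data set of 'size'
+                    // elements right away.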
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, dataTypeId, features, new long[]
+                            { 0 }, new long[]
+                            { size }, 1, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, dataTypeId, features, new long[]
+                            { size }, null, 1, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+        return new HDF5OpaqueType(baseWriter.fileId, dataTypeId, tag, baseWriter);
+    }
+
+    @Override
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final long size, final int blockSize, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert tag != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final int dataTypeId = getOrCreateOpaqueTypeId(tag);
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, dataTypeId, features, new long[]
+                        { size }, new long[]
+                        { blockSize }, 1, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+        return new HDF5OpaqueType(baseWriter.fileId, dataTypeId, tag, baseWriter);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final HDF5OpaqueType dataType,
+            final byte[] data, final long blockNumber)
+    {
+        assert objectPath != null;
+        assert dataType != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        dataType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { data.length };
+                    final long[] slabStartOrNull = new long[]
+                        { data.length * blockNumber };
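+                    // Extend the data set (if necessary) so that it covers blocks
+                    // 0..blockNumber, then select this block as a hyperslab in the file
+                    // data space and write into it.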
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { data.length * (blockNumber + 1) }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, dataType.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath,
+            final HDF5OpaqueType dataType, final byte[] data, final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert dataType != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        dataType.check(baseWriter.fileId);
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
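+                    // Unlike writeArrayBlock(), offset and dataSize need not be multiples of
+                    // a common block size; extend the data set to at least offset + dataSize
+                    // elements before writing.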
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, dataType.getNativeTypeId(), memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
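+    /**
+     * Looks up the committed opaque data type for <var>tag</var> under the housekeeping data
+     * type path, creating and committing a new one-byte opaque type if none exists yet.
+     */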
+    private int getOrCreateOpaqueTypeId(final String tag)
+    {
+        final String dataTypePath =
+                createDataTypePath(OPAQUE_PREFIX, baseWriter.houseKeepingNameSuffix, tag);
+        int dataTypeId = baseWriter.getDataTypeId(dataTypePath);
+        if (dataTypeId < 0)
+        {
+            dataTypeId = baseWriter.h5.createDataTypeOpaque(1, tag, baseWriter.fileRegistry);
+            baseWriter.commitDataType(dataTypePath, dataTypeId);
+        }
+        return dataTypeId;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5Reader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5Reader.java
new file mode 100644
index 0000000..6559333
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5Reader.java
@@ -0,0 +1,2341 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Date;
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.IHDF5CompoundInformationRetriever.IByteArrayInspector;
+
+/**
+ * A class for reading HDF5 files (HDF5 1.8.x and older).
+ * <p>
+ * The class focuses on ease of use instead of completeness. As a consequence, not all features of
+ * a valid HDF5 file can be read using this class, but only a subset. (All information written by
+ * {@link HDF5Writer} can be read by this class.)
+ * <p>
+ * Usage:
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5Factory.openForReading("test.h5");
+ * float[] f = reader.readFloatArray("/some/path/dataset");
+ * String s = reader.getStringAttribute("/some/path/dataset", "some key");
+ * reader.close();
+ * </pre>
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5Reader implements IHDF5Reader
+{
+    private final HDF5BaseReader baseReader;
+    
+    private final IHDF5FileLevelReadOnlyHandler fileHandler;
+    
+    private final IHDF5ObjectReadOnlyInfoProviderHandler objectHandler;
+
+    private final IHDF5ByteReader byteReader;
+
+    private final IHDF5ByteReader ubyteReader;
+
+    private final IHDF5ShortReader shortReader;
+
+    private final IHDF5ShortReader ushortReader;
+
+    private final IHDF5IntReader intReader;
+
+    private final IHDF5IntReader uintReader;
+
+    protected final IHDF5LongReader longReader;
+
+    private final IHDF5LongReader ulongReader;
+
+    private final IHDF5FloatReader floatReader;
+
+    private final IHDF5DoubleReader doubleReader;
+
+    private final IHDF5BooleanReader booleanReader;
+
+    private final IHDF5StringReader stringReader;
+
+    private final IHDF5EnumReader enumReader;
+
+    private final IHDF5CompoundReader compoundReader;
+
+    private final IHDF5DateTimeReader dateTimeReader;
+
+    private final HDF5TimeDurationReader timeDurationReader;
+
+    private final IHDF5ReferenceReader referenceReader;
+
+    private final IHDF5OpaqueReader opaqueReader;
+
+    HDF5Reader(final HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+        this.fileHandler = new HDF5FileLevelReadOnlyHandler(baseReader);
+        this.objectHandler = new HDF5ObjectReadOnlyInfoProviderHandler(baseReader);
+        this.byteReader = new HDF5ByteReader(baseReader);
+        this.ubyteReader = new HDF5UnsignedByteReader(baseReader);
+        this.shortReader = new HDF5ShortReader(baseReader);
+        this.ushortReader = new HDF5UnsignedShortReader(baseReader);
+        this.intReader = new HDF5IntReader(baseReader);
+        this.uintReader = new HDF5UnsignedIntReader(baseReader);
+        this.longReader = new HDF5LongReader(baseReader);
+        this.ulongReader = new HDF5UnsignedLongReader(baseReader);
+        this.floatReader = new HDF5FloatReader(baseReader);
+        this.doubleReader = new HDF5DoubleReader(baseReader);
+        this.booleanReader = new HDF5BooleanReader(baseReader);
+        this.stringReader = new HDF5StringReader(baseReader);
+        this.enumReader = new HDF5EnumReader(baseReader);
+        this.compoundReader = new HDF5CompoundReader(baseReader, enumReader);
+        this.dateTimeReader = new HDF5DateTimeReader(baseReader, (HDF5LongReader) longReader);
+        this.timeDurationReader = new HDF5TimeDurationReader(baseReader, (HDF5LongReader) longReader);
+        this.referenceReader = new HDF5ReferenceReader(baseReader);
+        this.opaqueReader = new HDF5OpaqueReader(baseReader);
+    }
+
+    void checkOpen()
+    {
+        baseReader.checkOpen();
+    }
+
+    int getFileId()
+    {
+        return baseReader.fileId;
+    }
+
+    // /////////////////////
+    // File
+    // /////////////////////
+    
+    @Override
+    public IHDF5FileLevelReadOnlyHandler file()
+    {
+        return fileHandler;
+    }
+
+    @Override
+    public boolean isPerformNumericConversions()
+    {
+        return baseReader.performNumericConversions;
+    }
+
+    @Override
+    public String getHouseKeepingNameSuffix()
+    {
+        return baseReader.houseKeepingNameSuffix;
+    }
+    
+    @Override
+    public File getFile()
+    {
+        return baseReader.hdf5File;
+    }
+
+    @Override
+    protected void finalize() throws Throwable
+    {
+        super.finalize();
+        close();
+    }
+
+    @Override
+    public void close()
+    {
+        baseReader.close();
+    }
+
+    @Override
+    public boolean isClosed()
+    {
+        return baseReader.isClosed();
+    }
+
+    // /////////////////////////////////
+    // Objects, links, groups and types
+    // /////////////////////////////////
+
+    @Override
+    public IHDF5ObjectReadOnlyInfoProviderHandler object()
+    {
+        return objectHandler;
+    }
+
+    @Override
+    public HDF5LinkInformation getLinkInformation(String objectPath)
+    {
+        return objectHandler.getLinkInformation(objectPath);
+    }
+
+    @Override
+    public HDF5ObjectInformation getObjectInformation(String objectPath)
+    {
+        return objectHandler.getObjectInformation(objectPath);
+    }
+
+    @Override
+    public HDF5ObjectType getObjectType(String objectPath, boolean followLink)
+    {
+        return objectHandler.getObjectType(objectPath, followLink);
+    }
+
+    @Override
+    public HDF5ObjectType getObjectType(String objectPath)
+    {
+        return objectHandler.getObjectType(objectPath);
+    }
+
+    @Override
+    public boolean exists(String objectPath, boolean followLink)
+    {
+        return objectHandler.exists(objectPath, followLink);
+    }
+
+    @Override
+    public boolean exists(String objectPath)
+    {
+        return objectHandler.exists(objectPath);
+    }
+
+    @Override
+    public String toHouseKeepingPath(String objectPath)
+    {
+        return objectHandler.toHouseKeepingPath(objectPath);
+    }
+
+    @Override
+    public boolean isHouseKeepingObject(String objectPath)
+    {
+        return objectHandler.isHouseKeepingObject(objectPath);
+    }
+
+    @Override
+    public boolean isGroup(String objectPath, boolean followLink)
+    {
+        return objectHandler.isGroup(objectPath, followLink);
+    }
+
+    @Override
+    public boolean isGroup(String objectPath)
+    {
+        return objectHandler.isGroup(objectPath);
+    }
+
+    @Override
+    public boolean isDataSet(String objectPath, boolean followLink)
+    {
+        return objectHandler.isDataSet(objectPath, followLink);
+    }
+
+    @Override
+    public boolean isDataSet(String objectPath)
+    {
+        return objectHandler.isDataSet(objectPath);
+    }
+
+    @Override
+    public boolean isDataType(String objectPath, boolean followLink)
+    {
+        return objectHandler.isDataType(objectPath, followLink);
+    }
+
+    @Override
+    public boolean isDataType(String objectPath)
+    {
+        return objectHandler.isDataType(objectPath);
+    }
+
+    @Override
+    public boolean isSoftLink(String objectPath)
+    {
+        return objectHandler.isSoftLink(objectPath);
+    }
+
+    @Override
+    public boolean isExternalLink(String objectPath)
+    {
+        return objectHandler.isExternalLink(objectPath);
+    }
+
+    @Override
+    public boolean isSymbolicLink(String objectPath)
+    {
+        return objectHandler.isSymbolicLink(objectPath);
+    }
+
+    @Override
+    public String tryGetSymbolicLinkTarget(String objectPath)
+    {
+        return objectHandler.tryGetSymbolicLinkTarget(objectPath);
+    }
+
+    @Override
+    public boolean hasAttribute(String objectPath, String attributeName)
+    {
+        return objectHandler.hasAttribute(objectPath, attributeName);
+    }
+
+    @Override
+    public List<String> getAttributeNames(String objectPath)
+    {
+        return objectHandler.getAttributeNames(objectPath);
+    }
+
+    @Override
+    public List<String> getAllAttributeNames(String objectPath)
+    {
+        return objectHandler.getAllAttributeNames(objectPath);
+    }
+
+    @Override
+    public HDF5DataTypeInformation getAttributeInformation(String objectPath, String attributeName)
+    {
+        return objectHandler.getAttributeInformation(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5DataTypeInformation getAttributeInformation(String objectPath, String attributeName,
+            DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return objectHandler.getAttributeInformation(objectPath, attributeName,
+                dataTypeInfoOptions);
+    }
+
+    @Override
+    public HDF5DataSetInformation getDataSetInformation(String dataSetPath)
+    {
+        return objectHandler.getDataSetInformation(dataSetPath);
+    }
+
+    @Override
+    public HDF5DataSetInformation getDataSetInformation(String dataSetPath,
+            DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return objectHandler.getDataSetInformation(dataSetPath, dataTypeInfoOptions);
+    }
+
+    @Override
+    public long getSize(String objectPath)
+    {
+        return objectHandler.getSize(objectPath);
+    }
+
+    @Override
+    public long getNumberOfElements(String objectPath)
+    {
+        return objectHandler.getNumberOfElements(objectPath);
+    }
+
+    @Override
+    public void copy(String sourceObject, IHDF5Writer destinationWriter, String destinationObject)
+    {
+        objectHandler.copy(sourceObject, destinationWriter, destinationObject);
+    }
+
+    @Override
+    public void copy(String sourceObject, IHDF5Writer destinationWriter)
+    {
+        objectHandler.copy(sourceObject, destinationWriter);
+    }
+
+    @Override
+    public void copyAll(IHDF5Writer destinationWriter)
+    {
+        objectHandler.copyAll(destinationWriter);
+    }
+
+    @Override
+    public List<String> getGroupMembers(String groupPath)
+    {
+        return objectHandler.getGroupMembers(groupPath);
+    }
+
+    @Override
+    public List<String> getAllGroupMembers(String groupPath)
+    {
+        return objectHandler.getAllGroupMembers(groupPath);
+    }
+
+    @Override
+    public List<String> getGroupMemberPaths(String groupPath)
+    {
+        return objectHandler.getGroupMemberPaths(groupPath);
+    }
+
+    @Override
+    public List<HDF5LinkInformation> getGroupMemberInformation(String groupPath,
+            boolean readLinkTargets)
+    {
+        return objectHandler.getGroupMemberInformation(groupPath, readLinkTargets);
+    }
+
+    @Override
+    public List<HDF5LinkInformation> getAllGroupMemberInformation(String groupPath,
+            boolean readLinkTargets)
+    {
+        return objectHandler.getAllGroupMemberInformation(groupPath, readLinkTargets);
+    }
+
+    @Override
+    public HDF5DataTypeVariant tryGetTypeVariant(String objectPath)
+    {
+        return objectHandler.tryGetTypeVariant(objectPath);
+    }
+
+    @Override
+    public HDF5DataTypeVariant tryGetTypeVariant(String objectPath, String attributeName)
+    {
+        return objectHandler.tryGetTypeVariant(objectPath, attributeName);
+    }
+
+    @Override
+    public String tryGetDataTypePath(String objectPath)
+    {
+        return objectHandler.tryGetDataTypePath(objectPath);
+    }
+
+    @Override
+    public String tryGetDataTypePath(HDF5DataType type)
+    {
+        return objectHandler.tryGetDataTypePath(type);
+    }
+
+    @Override
+    public boolean getBooleanAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return booleanReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public String getEnumAttributeAsString(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        return enumReader.getAttrAsString(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5EnumerationValue getEnumAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        return enumReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public <T extends Enum<T>> T getEnumAttribute(String objectPath, String attributeName,
+            Class<T> enumClass) throws HDF5JavaException
+    {
+        return enumReader.getAttr(objectPath, attributeName, enumClass);
+    }
+
+    @Override
+    public String[] getEnumArrayAttributeAsString(final String objectPath,
+            final String attributeName) throws HDF5JavaException
+    {
+        return enumReader.getArrayAttr(objectPath, attributeName).toStringArray();
+    }
+
+    @Override
+    public HDF5EnumerationValueArray getEnumArrayAttribute(final String objectPath,
+            final String attributeName) throws HDF5JavaException
+    {
+        return enumReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5EnumerationType getEnumType(String dataTypeName)
+    {
+        return enumReader.getType(dataTypeName);
+    }
+
+    @Override
+    public HDF5EnumerationType getEnumType(String dataTypeName, String[] values)
+            throws HDF5JavaException
+    {
+        return enumReader.getType(dataTypeName, values);
+    }
+
+    @Override
+    public HDF5EnumerationType getEnumType(String dataTypeName, String[] values, boolean check)
+            throws HDF5JavaException
+    {
+        return enumReader.getType(dataTypeName, values, check);
+    }
+
+    @Override
+    public HDF5EnumerationType getDataSetEnumType(String dataSetPath)
+    {
+        return enumReader.getDataSetType(dataSetPath);
+    }
+
+    @Override
+    public HDF5EnumerationType getEnumTypeForObject(String dataSetPath)
+    {
+        return enumReader.getDataSetType(dataSetPath);
+    }
+
+    // /////////////////////
+    // Data Sets reading
+    // /////////////////////
+
+    //
+    // Opaque
+    //
+
+    @Override
+    public String tryGetOpaqueTag(String objectPath)
+    {
+        return opaqueReader.tryGetOpaqueTag(objectPath);
+    }
+
+    @Override
+    public HDF5OpaqueType tryGetOpaqueType(String objectPath)
+    {
+        return opaqueReader.tryGetOpaqueType(objectPath);
+    }
+
+    @Override
+    public IHDF5OpaqueReader opaque()
+    {
+        return opaqueReader;
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<byte[]>> getAsByteArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return opaqueReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public byte[] readAsByteArray(String objectPath)
+    {
+        return opaqueReader.readArray(objectPath);
+    }
+
+    @Override
+    public byte[] getAttributeAsByteArray(String objectPath, String attributeName)
+    {
+        return opaqueReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public byte[] readAsByteArrayBlock(String objectPath, int blockSize, long blockNumber)
+            throws HDF5JavaException
+    {
+        return opaqueReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public byte[] readAsByteArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+            throws HDF5JavaException
+    {
+        return opaqueReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public int readAsByteArrayToBlockWithOffset(String objectPath, byte[] buffer, int blockSize,
+            long offset, int memoryOffset) throws HDF5JavaException
+    {
+        return opaqueReader.readArrayToBlockWithOffset(objectPath, buffer, blockSize,
+                offset, memoryOffset);
+    }
+
+    //
+    // Boolean
+    //
+
+    @Override
+    public IHDF5BooleanReader bool()
+    {
+        return booleanReader;
+    }
+
+    @Override
+    public BitSet readBitField(String objectPath) throws HDF5DatatypeInterfaceException
+    {
+        return booleanReader.readBitField(objectPath);
+    }
+
+    @Override
+    public BitSet readBitFieldBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return booleanReader.readBitFieldBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public BitSet readBitFieldBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return booleanReader.readBitFieldBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public boolean isBitSetInBitField(String objectPath, int bitIndex)
+    {
+        return booleanReader.isBitSet(objectPath, bitIndex);
+    }
+
+    @Override
+    public boolean readBoolean(String objectPath) throws HDF5JavaException
+    {
+        return booleanReader.read(objectPath);
+    }
+
+    //
+    // Time & date
+    //
+
+    @Override
+    public IHDF5DateTimeReader time()
+    {
+        return dateTimeReader;
+    }
+
+    @Override
+    public IHDF5TimeDurationReader duration()
+    {
+        return timeDurationReader;
+    }
+
+    @Override
+    public long getTimeStampAttribute(String objectPath, String attributeName)
+    {
+        return dateTimeReader.getAttrAsLong(objectPath, attributeName);
+    }
+
+    @Override
+    public Date getDateAttribute(String objectPath, String attributeName)
+    {
+        return dateTimeReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public boolean isTimeStamp(String objectPath, String attributeName) throws HDF5JavaException
+    {
+        return dateTimeReader.isTimeStamp(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5TimeDuration getTimeDurationAttribute(String objectPath, String attributeName)
+    {
+        return timeDurationReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public boolean isTimeDuration(String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.isTimeDuration(objectPath);
+    }
+
+    @Override
+    public boolean isTimeStamp(String objectPath) throws HDF5JavaException
+    {
+        return dateTimeReader.isTimeStamp(objectPath);
+    }
+
+    @Override
+    public boolean isTimeDuration(String objectPath, String attributeName) throws HDF5JavaException
+    {
+        return timeDurationReader.isTimeDuration(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5TimeUnit tryGetTimeUnit(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return timeDurationReader.tryGetTimeUnit(objectPath, attributeName);
+    }
+
+    @Override
+    public long[] getTimeStampArrayAttribute(String objectPath, String attributeName)
+    {
+        return dateTimeReader.getArrayAttrAsLong(objectPath, attributeName);
+    }
+
+    @Override
+    public Date[] getDateArrayAttribute(String objectPath, String attributeName)
+    {
+        return dateTimeReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5TimeDurationArray getTimeDurationArrayAttribute(String objectPath,
+            String attributeName)
+    {
+        return timeDurationReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5TimeUnit tryGetTimeUnit(String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.tryGetTimeUnit(objectPath);
+    }
+
+    @Override
+    @Deprecated
+    public Iterable<HDF5DataBlock<long[]>> getTimeDurationArrayNaturalBlocks(String dataSetPath,
+            HDF5TimeUnit timeUnit) throws HDF5JavaException
+    {
+        return timeDurationReader.getTimeDurationArrayNaturalBlocks(dataSetPath, timeUnit);
+    }
+
+    @Override
+    @Deprecated
+    public Iterable<HDF5DataBlock<HDF5TimeDuration[]>> getTimeDurationAndUnitArrayNaturalBlocks(
+            String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.getTimeDurationAndUnitArrayNaturalBlocks(objectPath);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<long[]>> getTimeStampArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return dateTimeReader.getTimeStampArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public Date readDate(String objectPath) throws HDF5JavaException
+    {
+        return dateTimeReader.readDate(objectPath);
+    }
+
+    @Override
+    public Date[] readDateArray(String objectPath) throws HDF5JavaException
+    {
+        return dateTimeReader.readDateArray(objectPath);
+    }
+
+    @Override
+    @Deprecated
+    public long readTimeDuration(String objectPath, HDF5TimeUnit timeUnit) throws HDF5JavaException
+    {
+        return timeDurationReader.readTimeDuration(objectPath, timeUnit);
+    }
+
+    @Override
+    public HDF5TimeDuration readTimeDuration(String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.read(objectPath);
+    }
+
+    @Override
+    @Deprecated
+    public HDF5TimeDuration readTimeDurationAndUnit(String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.readTimeDurationAndUnit(objectPath);
+    }
+
+    @Override
+    @Deprecated
+    public long[] readTimeDurationArray(String objectPath, HDF5TimeUnit timeUnit)
+            throws HDF5JavaException
+    {
+        return timeDurationReader.readTimeDurationArray(objectPath, timeUnit);
+    }
+
+    @Override
+    public HDF5TimeDurationArray readTimeDurationArray(String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.readArray(objectPath);
+    }
+
+    @Override
+    @Deprecated
+    public HDF5TimeDuration[] readTimeDurationAndUnitArray(String objectPath)
+            throws HDF5JavaException
+    {
+        return timeDurationReader.readTimeDurationAndUnitArray(objectPath);
+    }
+
+    @Override
+    @Deprecated
+    public long[] readTimeDurationArrayBlock(String objectPath, int blockSize, long blockNumber,
+            HDF5TimeUnit timeUnit)
+    {
+        return timeDurationReader.readTimeDurationArrayBlock(objectPath, blockSize, blockNumber,
+                timeUnit);
+    }
+
+    @Override
+    @Deprecated
+    public long[] readTimeDurationArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset, HDF5TimeUnit timeUnit)
+    {
+        return timeDurationReader.readTimeDurationArrayBlockWithOffset(objectPath, blockSize, offset,
+                timeUnit);
+    }
+
+    @Override
+    @Deprecated
+    public HDF5TimeDuration[] readTimeDurationAndUnitArrayBlock(String objectPath, int blockSize,
+            long blockNumber) throws HDF5JavaException
+    {
+        return timeDurationReader.readTimeDurationAndUnitArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    @Deprecated
+    public HDF5TimeDuration[] readTimeDurationAndUnitArrayBlockWithOffset(String objectPath,
+            int blockSize, long offset) throws HDF5JavaException
+    {
+        return timeDurationReader.readTimeDurationAndUnitArrayBlockWithOffset(objectPath, blockSize,
+                offset);
+    }
+
+    @Override
+    public long readTimeStamp(String objectPath) throws HDF5JavaException
+    {
+        return dateTimeReader.readTimeStamp(objectPath);
+    }
+
+    @Override
+    public long[] readTimeStampArray(String objectPath) throws HDF5JavaException
+    {
+        return dateTimeReader.readTimeStampArray(objectPath);
+    }
+
+    @Override
+    public long[] readTimeStampArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return dateTimeReader.readTimeStampArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public long[] readTimeStampArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return dateTimeReader.readTimeStampArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    //
+    // Reference
+    //
+
+    @Override
+    public IHDF5ReferenceReader reference()
+    {
+        return referenceReader;
+    }
+
+    @Override
+    public String readObjectReference(final String objectPath)
+    {
+        return referenceReader.read(objectPath);
+    }
+
+    @Override
+    public String readObjectReference(String objectPath, boolean resolveName)
+    {
+        return referenceReader.read(objectPath, resolveName);
+    }
+
+    @Override
+    public String[] readObjectReferenceArrayBlock(String objectPath, int blockSize,
+            long blockNumber, boolean resolveName)
+    {
+        return referenceReader.readArrayBlock(objectPath, blockSize, blockNumber,
+                resolveName);
+    }
+
+    @Override
+    public String[] readObjectReferenceArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset, boolean resolveName)
+    {
+        return referenceReader.readArrayBlockWithOffset(objectPath, blockSize,
+                offset, resolveName);
+    }
+
+    @Override
+    public MDArray<String> readObjectReferenceMDArrayBlock(String objectPath,
+            int[] blockDimensions, long[] blockNumber, boolean resolveName)
+    {
+        return referenceReader.readMDArrayBlock(objectPath, blockDimensions,
+                blockNumber, resolveName);
+    }
+
+    @Override
+    public MDArray<String> readObjectReferenceMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset, boolean resolveName)
+    {
+        return referenceReader.readMDArrayBlockWithOffset(objectPath,
+                blockDimensions, offset, resolveName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getObjectReferenceArrayNaturalBlocks(
+            String dataSetPath, boolean resolveName)
+    {
+        return referenceReader.getArrayNaturalBlocks(dataSetPath, resolveName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getObjectReferenceMDArrayNaturalBlocks(
+            String dataSetPath, boolean resolveName)
+    {
+        return referenceReader.getMDArrayNaturalBlocks(dataSetPath, resolveName);
+    }
+
+    @Override
+    public String[] readObjectReferenceArray(String objectPath)
+    {
+        return referenceReader.readArray(objectPath);
+    }
+
+    @Override
+    public String[] readObjectReferenceArray(String objectPath, boolean resolveName)
+    {
+        return referenceReader.readArray(objectPath, resolveName);
+    }
+
+    @Override
+    public MDArray<String> readObjectReferenceMDArray(String objectPath)
+    {
+        return referenceReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDArray<String> readObjectReferenceMDArray(String objectPath, boolean resolveName)
+    {
+        return referenceReader.readMDArray(objectPath, resolveName);
+    }
+
+    @Override
+    public String getObjectReferenceAttribute(String objectPath, String attributeName,
+            boolean resolveName)
+    {
+        return referenceReader.getAttr(objectPath, attributeName, resolveName);
+    }
+
+    @Override
+    public String[] getObjectReferenceArrayAttribute(String objectPath, String attributeName,
+            boolean resolveName)
+    {
+        return referenceReader.getArrayAttr(objectPath, attributeName,
+                resolveName);
+    }
+
+    @Override
+    public MDArray<String> getObjectReferenceMDArrayAttribute(String objectPath,
+            String attributeName, boolean resolveName)
+    {
+        return referenceReader.getMDArrayAttr(objectPath, attributeName,
+                resolveName);
+    }
+
+    @Override
+    public HDF5TimeDurationArray readTimeDurationArrayBlock(String objectPath, int blockSize,
+            long blockNumber) throws HDF5JavaException
+    {
+        return timeDurationReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public HDF5TimeDurationArray readTimeDurationArrayBlockWithOffset(String objectPath,
+            int blockSize, long offset) throws HDF5JavaException
+    {
+        return timeDurationReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<HDF5TimeDurationArray>> getTimeDurationArrayNaturalBlocks(
+            String objectPath) throws HDF5JavaException
+    {
+        return timeDurationReader.getArrayNaturalBlocks(objectPath);
+    }
+
+    //
+    // References
+    //
+
+    @Override
+    public String resolvePath(String reference) throws HDF5JavaException
+    {
+        return referenceReader.resolvePath(reference);
+    }
+
+    @Override
+    public String getObjectReferenceAttribute(final String objectPath, final String attributeName)
+    {
+        return referenceReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public String[] getObjectReferenceArrayAttribute(String objectPath, String attributeName)
+    {
+        return referenceReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDArray<String> getObjectReferenceMDArrayAttribute(String objectPath,
+            String attributeName)
+    {
+        return referenceReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public String[] readObjectReferenceArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return referenceReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public String[] readObjectReferenceArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset)
+    {
+        return referenceReader.readArrayBlockWithOffset(objectPath, blockSize,
+                offset);
+    }
+
+    @Override
+    public MDArray<String> readObjectReferenceMDArrayBlock(String objectPath,
+            int[] blockDimensions, long[] blockNumber)
+    {
+        return referenceReader.readMDArrayBlock(objectPath, blockDimensions,
+                blockNumber);
+    }
+
+    @Override
+    public MDArray<String> readObjectReferenceMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset)
+    {
+        return referenceReader.readMDArrayBlockWithOffset(objectPath,
+                blockDimensions, offset);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getObjectReferenceArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return referenceReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getObjectReferenceMDArrayNaturalBlocks(
+            String dataSetPath)
+    {
+        return referenceReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    //
+    // String
+    //
+
+    @Override
+    public IHDF5StringReader string()
+    {
+        return stringReader;
+    }
+
+    @Override
+    public String getStringAttribute(String objectPath, String attributeName)
+    {
+        return stringReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public String[] getStringArrayAttribute(String objectPath, String attributeName)
+    {
+        return stringReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDArray<String> getStringMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return stringReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public String readString(String objectPath) throws HDF5JavaException
+    {
+        return stringReader.read(objectPath);
+    }
+
+    @Override
+    public String[] readStringArray(String objectPath) throws HDF5JavaException
+    {
+        return stringReader.readArray(objectPath);
+    }
+
+    @Override
+    public String[] readStringArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return stringReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public String[] readStringArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return stringReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDArray<String> readStringMDArray(String objectPath)
+    {
+        return stringReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDArray<String> readStringMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return stringReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDArray<String> readStringMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset)
+    {
+        return stringReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getStringArrayNaturalBlocks(String objectPath)
+            throws HDF5JavaException
+    {
+        return stringReader.getArrayNaturalBlocks(objectPath);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getStringMDArrayNaturalBlocks(
+            String objectPath)
+    {
+        return stringReader.getMDArrayNaturalBlocks(objectPath);
+    }
+
+    //
+    // Enums
+    //
+
+    @Override
+    public IHDF5EnumReader enums()
+    {
+        return enumReader;
+    }
+
+    @Override
+    public IHDF5EnumReader enumeration()
+    {
+        return enumReader;
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getEnumArrayNaturalBlocks(
+            String objectPath, HDF5EnumerationType enumType) throws HDF5JavaException
+    {
+        return enumReader.getArrayBlocks(objectPath, enumType);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getEnumArrayNaturalBlocks(
+            String objectPath) throws HDF5JavaException
+    {
+        return enumReader.getArrayBlocks(objectPath);
+    }
+
+    @Override
+    public HDF5EnumerationValue readEnum(String objectPath, HDF5EnumerationType enumType)
+            throws HDF5JavaException
+    {
+        return enumReader.read(objectPath, enumType);
+    }
+
+    @Override
+    public HDF5EnumerationValue readEnum(String objectPath) throws HDF5JavaException
+    {
+        return enumReader.read(objectPath);
+    }
+
+    @Override
+    public <T extends Enum<T>> T readEnum(String objectPath, Class<T> enumClass)
+            throws HDF5JavaException
+    {
+        return enumReader.read(objectPath, enumClass);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readEnumArray(String objectPath, HDF5EnumerationType enumType)
+            throws HDF5JavaException
+    {
+        return enumReader.readArray(objectPath, enumType);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readEnumArray(String objectPath) throws HDF5JavaException
+    {
+        return enumReader.readArray(objectPath);
+    }
+
+    @Override
+    public <T extends Enum<T>> T[] readEnumArray(String objectPath, Class<T> enumClass)
+            throws HDF5JavaException
+    {
+        return readEnumArray(objectPath).toEnumArray(enumClass);
+    }
+
+    @Override
+    public String[] readEnumArrayAsString(String objectPath) throws HDF5JavaException
+    {
+        return enumReader.readArray(objectPath).toStringArray();
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readEnumArrayBlock(String objectPath,
+            HDF5EnumerationType enumType, int blockSize, long blockNumber)
+    {
+        return enumReader.readArrayBlock(objectPath, enumType, blockSize, blockNumber);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readEnumArrayBlock(String objectPath, int blockSize,
+            long blockNumber)
+    {
+        return enumReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readEnumArrayBlockWithOffset(String objectPath,
+            HDF5EnumerationType enumType, int blockSize, long offset)
+    {
+        return enumReader.readArrayBlockWithOffset(objectPath, enumType, blockSize, offset);
+    }
+
+    @Override
+    public HDF5EnumerationValueArray readEnumArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset)
+    {
+        return enumReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public String readEnumAsString(String objectPath) throws HDF5JavaException
+    {
+        return enumReader.readAsString(objectPath);
+    }
+
+    //
+    // Compounds
+    //
+
+    @Override
+    public IHDF5CompoundReader compounds()
+    {
+        return compoundReader;
+    }
+
+    @Override
+    public IHDF5CompoundReader compound()
+    {
+        return compoundReader;
+    }
+
+    @Override
+    public <T> Iterable<HDF5DataBlock<T[]>> getCompoundArrayNaturalBlocks(String objectPath,
+            HDF5CompoundType<T> type, IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        return compoundReader.getArrayBlocks(objectPath, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> Iterable<HDF5DataBlock<T[]>> getCompoundArrayNaturalBlocks(String objectPath,
+            HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return compoundReader.getArrayBlocks(objectPath, type);
+    }
+
+    @Override
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getCompoundMDArrayNaturalBlocks(
+            String objectPath, HDF5CompoundType<T> type, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        return compoundReader.getMDArrayBlocks(objectPath, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getCompoundMDArrayNaturalBlocks(
+            String objectPath, HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return compoundReader.getMDArrayBlocks(objectPath, type);
+    }
+
+    @Override
+    public <T> Iterable<HDF5DataBlock<T[]>> getCompoundArrayNaturalBlocks(String objectPath,
+            Class<T> pojoClass) throws HDF5JavaException
+    {
+        return compoundReader.getArrayBlocks(objectPath, pojoClass);
+    }
+
+    @Override
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getCompoundMDArrayNaturalBlocks(
+            String objectPath, Class<T> pojoClass) throws HDF5JavaException
+    {
+        return compoundReader.getMDArrayBlocks(objectPath, pojoClass);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getCompoundDataSetInformation(String dataSetPath,
+            boolean sortAlphabetically) throws HDF5JavaException
+    {
+        final HDF5CompoundMemberInformation[] compoundInformation =
+                compoundReader.getDataSetInfo(dataSetPath, DataTypeInfoOptions.DEFAULT);
+        if (sortAlphabetically)
+        {
+            Arrays.sort(compoundInformation);
+        }
+        return compoundInformation;
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getCompoundDataSetInformation(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return compoundReader.getDataSetInfo(dataSetPath);
+    }
+
+    @Override
+    public <T> HDF5CompoundMemberInformation[] getCompoundMemberInformation(Class<T> compoundClass)
+    {
+        return compoundReader.getMemberInfo(compoundClass);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getCompoundMemberInformation(String dataTypeName)
+    {
+        return compoundReader.getMemberInfo(dataTypeName);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getCompoundType(Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members)
+    {
+        return compoundReader.getType(pojoClass, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getCompoundType(String name, Class<T> compoundType,
+            HDF5CompoundMemberMapping... members)
+    {
+        return compoundReader.getType(name, compoundType, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetCompoundType(String objectPath, Class<T> compoundClass)
+    {
+        return compoundReader.getDataSetType(objectPath, compoundClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getAttributeCompoundType(String objectPath,
+            String attributeName, Class<T> pojoClass)
+    {
+        return compoundReader.getAttributeType(objectPath, attributeName, pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(Class<T> pojoClass)
+    {
+        return compoundReader.getInferredType(pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(String name, Class<T> compoundType)
+    {
+        return compoundReader.getInferredType(name, compoundType);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(String name, T template)
+    {
+        return compoundReader.getInferredType(name, template);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(T template)
+    {
+        return compoundReader.getInferredType(template);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(String name, T template,
+            HDF5CompoundMappingHints hints)
+    {
+        return compoundReader.getInferredType(name, template, hints);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredCompoundType(String name, List<String> memberNames,
+            List<?> template)
+    {
+        return compoundReader.getInferredType(name, memberNames, template);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredCompoundType(String name, String[] memberNames,
+            Object[] template)
+    {
+        return compoundReader.getInferredType(name, memberNames, template);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredCompoundType(List<String> memberNames,
+            List<?> template)
+    {
+        return compoundReader.getInferredType(memberNames, template);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredCompoundType(String[] memberNames,
+            Object[] template)
+    {
+        return compoundReader.getInferredType(memberNames, template);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedCompoundType(Class<T> compoundClass)
+    {
+        return compoundReader.getNamedType(compoundClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedCompoundType(String dataTypeName, Class<T> compoundClass)
+    {
+        return compoundReader.getNamedType(dataTypeName, compoundClass);
+    }
+
+    @Override
+    public <T> T readCompound(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        return compoundReader.read(objectPath, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> T readCompound(String objectPath, HDF5CompoundType<T> type) throws HDF5JavaException
+    {
+        return compoundReader.read(objectPath, type);
+    }
+
+    @Override
+    public <T> T readCompound(String objectPath, Class<T> pojoClass) throws HDF5JavaException
+    {
+        return compoundReader.read(objectPath, pojoClass);
+    }
+
+    @Override
+    public <T> T[] readCompoundArray(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        return compoundReader.readArray(objectPath, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> T[] readCompoundArray(String objectPath, HDF5CompoundType<T> type)
+            throws HDF5JavaException
+    {
+        return compoundReader.readArray(objectPath, type);
+    }
+
+    @Override
+    public <T> T[] readCompoundArray(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        return compoundReader.readArray(objectPath, pojoClass);
+    }
+
+    @Override
+    public <T> T[] readCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long blockNumber, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        return compoundReader.readArrayBlock(objectPath, type, blockSize, blockNumber,
+                inspectorOrNull);
+    }
+
+    @Override
+    public <T> T[] readCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long blockNumber) throws HDF5JavaException
+    {
+        return compoundReader.readArrayBlock(objectPath, type, blockSize, blockNumber);
+    }
+
+    @Override
+    public <T> T[] readCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long offset, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        return compoundReader.readArrayBlockWithOffset(objectPath, type, blockSize, offset,
+                inspectorOrNull);
+    }
+
+    @Override
+    public <T> T[] readCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long offset) throws HDF5JavaException
+    {
+        return compoundReader.readArrayBlockWithOffset(objectPath, type, blockSize, offset);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        return compoundReader.readMDArray(objectPath, type, inspectorOrNull);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArray(String objectPath, HDF5CompoundType<T> type)
+            throws HDF5JavaException
+    {
+        return compoundReader.readMDArray(objectPath, type);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArray(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException
+    {
+        return compoundReader.readMDArray(objectPath, pojoClass);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] blockNumber, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException
+    {
+        return compoundReader.readMDArrayBlock(objectPath, type, blockDimensions, blockNumber,
+                inspectorOrNull);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] blockNumber) throws HDF5JavaException
+    {
+        return compoundReader.readMDArrayBlock(objectPath, type, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, int[] blockDimensions, long[] offset,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException
+    {
+        return compoundReader.readMDArrayBlockWithOffset(objectPath, type, blockDimensions, offset,
+                inspectorOrNull);
+    }
+
+    @Override
+    public <T> MDArray<T> readCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, int[] blockDimensions, long[] offset)
+            throws HDF5JavaException
+    {
+        return compoundReader.readMDArrayBlockWithOffset(objectPath, type, blockDimensions, offset);
+    }
+
+    // ------------------------------------------------------------------------------
+    // Primitive types - START
+    // ------------------------------------------------------------------------------
+
+    @Override
+    public byte[] getByteArrayAttribute(String objectPath, String attributeName)
+    {
+        return byteReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<byte[]>> getByteArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return byteReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public byte getByteAttribute(String objectPath, String attributeName)
+    {
+        return byteReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDByteArray getByteMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return byteReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDByteArray>> getByteMDArrayNaturalBlocks(String dataSetPath)
+    {
+        return byteReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public byte[][] getByteMatrixAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return byteReader.getMatrixAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public byte readByte(String objectPath)
+    {
+        return byteReader.read(objectPath);
+    }
+
+    @Override
+    public byte[] readByteArray(String objectPath)
+    {
+        return byteReader.readArray(objectPath);
+    }
+
+    @Override
+    public byte[] readByteArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return byteReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public byte[] readByteArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return byteReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDByteArray readByteMDArray(String objectPath)
+    {
+        return byteReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDByteArray readByteMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return byteReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDByteArray readByteMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return byteReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public byte[][] readByteMatrix(String objectPath) throws HDF5JavaException
+    {
+        return byteReader.readMatrix(objectPath);
+    }
+
+    @Override
+    public byte[][] readByteMatrixBlock(String objectPath, int blockSizeX, int blockSizeY,
+            long blockNumberX, long blockNumberY) throws HDF5JavaException
+    {
+        return byteReader.readMatrixBlock(objectPath, blockSizeX, blockSizeY, blockNumberX,
+                blockNumberY);
+    }
+
+    @Override
+    public byte[][] readByteMatrixBlockWithOffset(String objectPath, int blockSizeX,
+            int blockSizeY, long offsetX, long offsetY) throws HDF5JavaException
+    {
+        return byteReader.readMatrixBlockWithOffset(objectPath, blockSizeX, blockSizeY,
+                offsetX, offsetY);
+    }
+
+    @Override
+    public int[] readToByteMDArrayBlockWithOffset(String objectPath, MDByteArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        return byteReader.readToMDArrayBlockWithOffset(objectPath, array, blockDimensions,
+                offset, memoryOffset);
+    }
+
+    @Override
+    public int[] readToByteMDArrayWithOffset(String objectPath, MDByteArray array,
+            int[] memoryOffset)
+    {
+        return byteReader.readToMDArrayWithOffset(objectPath, array, memoryOffset);
+    }
+
+    @Override
+    public double[] getDoubleArrayAttribute(String objectPath, String attributeName)
+    {
+        return doubleReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<double[]>> getDoubleArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return doubleReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public double getDoubleAttribute(String objectPath, String attributeName)
+    {
+        return doubleReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDDoubleArray getDoubleMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return doubleReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDDoubleArray>> getDoubleMDArrayNaturalBlocks(String dataSetPath)
+    {
+        return doubleReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public double[][] getDoubleMatrixAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return doubleReader.getMatrixAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public double readDouble(String objectPath)
+    {
+        return doubleReader.read(objectPath);
+    }
+
+    @Override
+    public double[] readDoubleArray(String objectPath)
+    {
+        return doubleReader.readArray(objectPath);
+    }
+
+    @Override
+    public double[] readDoubleArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return doubleReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public double[] readDoubleArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return doubleReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDDoubleArray readDoubleMDArray(String objectPath)
+    {
+        return doubleReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDDoubleArray readDoubleMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return doubleReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDDoubleArray readDoubleMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return doubleReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public double[][] readDoubleMatrix(String objectPath) throws HDF5JavaException
+    {
+        return doubleReader.readMatrix(objectPath);
+    }
+
+    @Override
+    public double[][] readDoubleMatrixBlock(String objectPath, int blockSizeX, int blockSizeY,
+            long blockNumberX, long blockNumberY) throws HDF5JavaException
+    {
+        return doubleReader.readMatrixBlock(objectPath, blockSizeX, blockSizeY, blockNumberX,
+                blockNumberY);
+    }
+
+    @Override
+    public double[][] readDoubleMatrixBlockWithOffset(String objectPath, int blockSizeX,
+            int blockSizeY, long offsetX, long offsetY) throws HDF5JavaException
+    {
+        return doubleReader.readMatrixBlockWithOffset(objectPath, blockSizeX, blockSizeY,
+                offsetX, offsetY);
+    }
+
+    @Override
+    public int[] readToDoubleMDArrayBlockWithOffset(String objectPath, MDDoubleArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        return doubleReader.readToMDArrayBlockWithOffset(objectPath, array, blockDimensions,
+                offset, memoryOffset);
+    }
+
+    @Override
+    public int[] readToDoubleMDArrayWithOffset(String objectPath, MDDoubleArray array,
+            int[] memoryOffset)
+    {
+        return doubleReader.readToMDArrayWithOffset(objectPath, array, memoryOffset);
+    }
+
+    @Override
+    public float[] getFloatArrayAttribute(String objectPath, String attributeName)
+    {
+        return floatReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<float[]>> getFloatArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return floatReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public float getFloatAttribute(String objectPath, String attributeName)
+    {
+        return floatReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDFloatArray getFloatMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return floatReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDFloatArray>> getFloatMDArrayNaturalBlocks(String dataSetPath)
+    {
+        return floatReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public float[][] getFloatMatrixAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return floatReader.getMatrixAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public float readFloat(String objectPath)
+    {
+        return floatReader.read(objectPath);
+    }
+
+    @Override
+    public float[] readFloatArray(String objectPath)
+    {
+        return floatReader.readArray(objectPath);
+    }
+
+    @Override
+    public float[] readFloatArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return floatReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public float[] readFloatArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return floatReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDFloatArray readFloatMDArray(String objectPath)
+    {
+        return floatReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDFloatArray readFloatMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return floatReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDFloatArray readFloatMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return floatReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public float[][] readFloatMatrix(String objectPath) throws HDF5JavaException
+    {
+        return floatReader.readMatrix(objectPath);
+    }
+
+    @Override
+    public float[][] readFloatMatrixBlock(String objectPath, int blockSizeX, int blockSizeY,
+            long blockNumberX, long blockNumberY) throws HDF5JavaException
+    {
+        return floatReader.readMatrixBlock(objectPath, blockSizeX, blockSizeY, blockNumberX,
+                blockNumberY);
+    }
+
+    @Override
+    public float[][] readFloatMatrixBlockWithOffset(String objectPath, int blockSizeX,
+            int blockSizeY, long offsetX, long offsetY) throws HDF5JavaException
+    {
+        return floatReader.readMatrixBlockWithOffset(objectPath, blockSizeX, blockSizeY,
+                offsetX, offsetY);
+    }
+
+    @Override
+    public int[] readToFloatMDArrayBlockWithOffset(String objectPath, MDFloatArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        return floatReader.readToMDArrayBlockWithOffset(objectPath, array, blockDimensions,
+                offset, memoryOffset);
+    }
+
+    @Override
+    public int[] readToFloatMDArrayWithOffset(String objectPath, MDFloatArray array,
+            int[] memoryOffset)
+    {
+        return floatReader.readToMDArrayWithOffset(objectPath, array, memoryOffset);
+    }
+
+    @Override
+    public int[] getIntArrayAttribute(String objectPath, String attributeName)
+    {
+        return intReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<int[]>> getIntArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return intReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public int getIntAttribute(String objectPath, String attributeName)
+    {
+        return intReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDIntArray getIntMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return intReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDIntArray>> getIntMDArrayNaturalBlocks(String dataSetPath)
+    {
+        return intReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public int[][] getIntMatrixAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return intReader.getMatrixAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public int readInt(String objectPath)
+    {
+        return intReader.read(objectPath);
+    }
+
+    @Override
+    public int[] readIntArray(String objectPath)
+    {
+        return intReader.readArray(objectPath);
+    }
+
+    @Override
+    public int[] readIntArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return intReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public int[] readIntArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return intReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDIntArray readIntMDArray(String objectPath)
+    {
+        return intReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDIntArray readIntMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return intReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDIntArray readIntMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return intReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public int[][] readIntMatrix(String objectPath) throws HDF5JavaException
+    {
+        return intReader.readMatrix(objectPath);
+    }
+
+    @Override
+    public int[][] readIntMatrixBlock(String objectPath, int blockSizeX, int blockSizeY,
+            long blockNumberX, long blockNumberY) throws HDF5JavaException
+    {
+        return intReader.readMatrixBlock(objectPath, blockSizeX, blockSizeY, blockNumberX,
+                blockNumberY);
+    }
+
+    @Override
+    public int[][] readIntMatrixBlockWithOffset(String objectPath, int blockSizeX, int blockSizeY,
+            long offsetX, long offsetY) throws HDF5JavaException
+    {
+        return intReader.readMatrixBlockWithOffset(objectPath, blockSizeX, blockSizeY, offsetX,
+                offsetY);
+    }
+
+    @Override
+    public int[] readToIntMDArrayBlockWithOffset(String objectPath, MDIntArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        return intReader.readToMDArrayBlockWithOffset(objectPath, array, blockDimensions,
+                offset, memoryOffset);
+    }
+
+    @Override
+    public int[] readToIntMDArrayWithOffset(String objectPath, MDIntArray array, int[] memoryOffset)
+    {
+        return intReader.readToMDArrayWithOffset(objectPath, array, memoryOffset);
+    }
+
+    @Override
+    public long[] getLongArrayAttribute(String objectPath, String attributeName)
+    {
+        return longReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<long[]>> getLongArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return longReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public long getLongAttribute(String objectPath, String attributeName)
+    {
+        return longReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDLongArray getLongMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return longReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getLongMDArrayNaturalBlocks(String dataSetPath)
+    {
+        return longReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public long[][] getLongMatrixAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return longReader.getMatrixAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public long readLong(String objectPath)
+    {
+        return longReader.read(objectPath);
+    }
+
+    @Override
+    public long[] readLongArray(String objectPath)
+    {
+        return longReader.readArray(objectPath);
+    }
+
+    @Override
+    public long[] readLongArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return longReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public long[] readLongArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return longReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDLongArray readLongMDArray(String objectPath)
+    {
+        return longReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDLongArray readLongMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return longReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDLongArray readLongMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return longReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public long[][] readLongMatrix(String objectPath) throws HDF5JavaException
+    {
+        return longReader.readMatrix(objectPath);
+    }
+
+    @Override
+    public long[][] readLongMatrixBlock(String objectPath, int blockSizeX, int blockSizeY,
+            long blockNumberX, long blockNumberY) throws HDF5JavaException
+    {
+        return longReader.readMatrixBlock(objectPath, blockSizeX, blockSizeY, blockNumberX,
+                blockNumberY);
+    }
+
+    @Override
+    public long[][] readLongMatrixBlockWithOffset(String objectPath, int blockSizeX,
+            int blockSizeY, long offsetX, long offsetY) throws HDF5JavaException
+    {
+        return longReader.readMatrixBlockWithOffset(objectPath, blockSizeX, blockSizeY,
+                offsetX, offsetY);
+    }
+
+    @Override
+    public int[] readToLongMDArrayBlockWithOffset(String objectPath, MDLongArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        return longReader.readToMDArrayBlockWithOffset(objectPath, array, blockDimensions,
+                offset, memoryOffset);
+    }
+
+    @Override
+    public int[] readToLongMDArrayWithOffset(String objectPath, MDLongArray array,
+            int[] memoryOffset)
+    {
+        return longReader.readToMDArrayWithOffset(objectPath, array, memoryOffset);
+    }
+
+    @Override
+    public short[] getShortArrayAttribute(String objectPath, String attributeName)
+    {
+        return shortReader.getArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<short[]>> getShortArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return shortReader.getArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public short getShortAttribute(String objectPath, String attributeName)
+    {
+        return shortReader.getAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public MDShortArray getShortMDArrayAttribute(String objectPath, String attributeName)
+    {
+        return shortReader.getMDArrayAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDShortArray>> getShortMDArrayNaturalBlocks(String dataSetPath)
+    {
+        return shortReader.getMDArrayNaturalBlocks(dataSetPath);
+    }
+
+    @Override
+    public short[][] getShortMatrixAttribute(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        return shortReader.getMatrixAttr(objectPath, attributeName);
+    }
+
+    @Override
+    public short readShort(String objectPath)
+    {
+        return shortReader.read(objectPath);
+    }
+
+    @Override
+    public short[] readShortArray(String objectPath)
+    {
+        return shortReader.readArray(objectPath);
+    }
+
+    @Override
+    public short[] readShortArrayBlock(String objectPath, int blockSize, long blockNumber)
+    {
+        return shortReader.readArrayBlock(objectPath, blockSize, blockNumber);
+    }
+
+    @Override
+    public short[] readShortArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return shortReader.readArrayBlockWithOffset(objectPath, blockSize, offset);
+    }
+
+    @Override
+    public MDShortArray readShortMDArray(String objectPath)
+    {
+        return shortReader.readMDArray(objectPath);
+    }
+
+    @Override
+    public MDShortArray readShortMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        return shortReader.readMDArrayBlock(objectPath, blockDimensions, blockNumber);
+    }
+
+    @Override
+    public MDShortArray readShortMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return shortReader.readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public short[][] readShortMatrix(String objectPath) throws HDF5JavaException
+    {
+        return shortReader.readMatrix(objectPath);
+    }
+
+    @Override
+    public short[][] readShortMatrixBlock(String objectPath, int blockSizeX, int blockSizeY,
+            long blockNumberX, long blockNumberY) throws HDF5JavaException
+    {
+        return shortReader.readMatrixBlock(objectPath, blockSizeX, blockSizeY, blockNumberX,
+                blockNumberY);
+    }
+
+    @Override
+    public short[][] readShortMatrixBlockWithOffset(String objectPath, int blockSizeX,
+            int blockSizeY, long offsetX, long offsetY) throws HDF5JavaException
+    {
+        return shortReader.readMatrixBlockWithOffset(objectPath, blockSizeX, blockSizeY,
+                offsetX, offsetY);
+    }
+
+    @Override
+    public int[] readToShortMDArrayBlockWithOffset(String objectPath, MDShortArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        return shortReader.readToMDArrayBlockWithOffset(objectPath, array, blockDimensions,
+                offset, memoryOffset);
+    }
+
+    @Override
+    public int[] readToShortMDArrayWithOffset(String objectPath, MDShortArray array,
+            int[] memoryOffset)
+    {
+        return shortReader.readToMDArrayWithOffset(objectPath, array, memoryOffset);
+    }
+
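+    // Typed facades for the primitive number types. The uintN() variants read unsigned
+    // data; values are returned bit-for-bit in Java's signed primitive types.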
+    @Override
+    public IHDF5ByteReader int8()
+    {
+        return byteReader;
+    }
+
+    @Override
+    public IHDF5ByteReader uint8()
+    {
+        return ubyteReader;
+    }
+
+    @Override
+    public IHDF5ShortReader int16()
+    {
+        return shortReader;
+    }
+
+    @Override
+    public IHDF5ShortReader uint16()
+    {
+        return ushortReader;
+    }
+
+    @Override
+    public IHDF5IntReader int32()
+    {
+        return intReader;
+    }
+
+    @Override
+    public IHDF5IntReader uint32()
+    {
+        return uintReader;
+    }
+
+    @Override
+    public IHDF5LongReader int64()
+    {
+        return longReader;
+    }
+
+    @Override
+    public IHDF5LongReader uint64()
+    {
+        return ulongReader;
+    }
+
+    @Override
+    public IHDF5FloatReader float32()
+    {
+        return floatReader;
+    }
+
+    @Override
+    public IHDF5DoubleReader float64()
+    {
+        return doubleReader;
+    }
+
+    // ------------------------------------------------------------------------------
+    // Primitive types - END
+    // ------------------------------------------------------------------------------
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ReaderConfigurator.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ReaderConfigurator.java
new file mode 100644
index 0000000..89a1341
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ReaderConfigurator.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+/**
+ * A configurator for an {@link IHDF5Reader}.
+ * <p>
+ * If you want the reader to perform numeric conversions, call {@link #performNumericConversions()}
+ * before calling {@link #reader()}.
+ * 
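+ * A minimal usage sketch (the file name is illustrative):
+ * 
+ * <pre>
+ * IHDF5Reader reader =
+ *         new HDF5ReaderConfigurator(new File("data.h5")).performNumericConversions().reader();
+ * </pre>
+ * 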
+ * @author Bernd Rinn
+ */
+class HDF5ReaderConfigurator implements IHDF5ReaderConfigurator
+{
+
+    protected final File hdf5File;
+
+    protected boolean performNumericConversions;
+
+    protected boolean useUTF8CharEncoding;
+
+    protected boolean autoDereference = true;
+
+    protected HDF5Reader readerWriterOrNull;
+
+    HDF5ReaderConfigurator(File hdf5File)
+    {
+        assert hdf5File != null;
+
+        this.hdf5File = hdf5File.getAbsoluteFile();
+    }
+
+    @Override
+    public boolean platformSupportsNumericConversions()
+    {
+        // Note: add code here for any known platforms that do not support numeric conversions.
+        return true;
+    }
+
+    @Override
+    public HDF5ReaderConfigurator performNumericConversions()
+    {
+        if (platformSupportsNumericConversions())
+        {
+            this.performNumericConversions = true;
+        }
+        return this;
+    }
+
+    @Override
+    public HDF5ReaderConfigurator useUTF8CharacterEncoding()
+    {
+        this.useUTF8CharEncoding = true;
+        return this;
+    }
+
+    @Override
+    public HDF5ReaderConfigurator noAutoDereference()
+    {
+        this.autoDereference = false;
+        return this;
+    }
+
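+    // Lazily creates the underlying HDF5Reader on first call; subsequent calls return the
+    // same instance.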
+    @Override
+    public IHDF5Reader reader()
+    {
+        if (readerWriterOrNull == null)
+        {
+            readerWriterOrNull =
+                    new HDF5Reader(new HDF5BaseReader(hdf5File, performNumericConversions,
+                            autoDereference, IHDF5WriterConfigurator.FileFormat.ALLOW_1_8, false,
+                            ""));
+        }
+        return readerWriterOrNull;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ReferenceReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ReferenceReader.java
new file mode 100644
index 0000000..7996f33
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ReferenceReader.java
@@ -0,0 +1,651 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_REFERENCE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_REF_OBJ;
+
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * A reader for HDF5 references.
+ * 
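+ * A minimal usage sketch (the data set path is illustrative, and obtaining this reader via
+ * {@code reference()} on the main reader is an assumption of the sketch):
+ * 
+ * <pre>
+ * IHDF5ReferenceReader refReader = reader.reference();
+ * String referencedObjectPath = refReader.read("/refDataSet");
+ * </pre>
+ * 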
+ * @author Bernd Rinn
+ */
+public class HDF5ReferenceReader implements IHDF5ReferenceReader
+{
+
+    private final HDF5BaseReader baseReader;
+
+    HDF5ReferenceReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // /////////////////////
+    // Specific
+    // /////////////////////
+
+    @Override
+    public String resolvePath(String reference)
+    {
+        assert reference != null;
+
+        baseReader.checkOpen();
+        if (reference.charAt(0) != '\0')
+        {
+            throw new HDF5JavaException(String.format("'%s' is not a reference.", reference));
+        }
+        return baseReader.h5.getReferencedObjectName(baseReader.fileId,
+                Long.parseLong(reference.substring(1)));
+    }
+
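+    /**
+     * Encodes a raw object reference as a string: a leading '\0' character marks the value
+     * as an (unresolved) reference and is followed by the decimal representation of the
+     * reference. {@link #resolvePath(String)} parses this encoding and resolves the
+     * reference to an object path.
+     */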
+    private String refToStr(long reference)
+    {
+        return '\0' + Long.toString(reference);
+    }
+
+    private String[] refToStr(long[] references)
+    {
+        final String[] result = new String[references.length];
+        for (int i = 0; i < references.length; ++i)
+        {
+            result[i] = '\0' + Long.toString(references[i]);
+        }
+        return result;
+    }
+
+    private void checkReference(final int dataTypeId, final String objectPath)
+            throws HDF5JavaException
+    {
+        final boolean isReference = (baseReader.h5.getClassType(dataTypeId) == H5T_REFERENCE);
+        if (isReference == false)
+        {
+            throw new HDF5JavaException("Dataset " + objectPath + " is not a reference.");
+        }
+    }
+
+    private void checkRank1(final int[] arrayDimensions, final String objectPath)
+    {
+        if (arrayDimensions.length != 1)
+        {
+            throw new HDF5JavaException("Dataset " + objectPath
+                    + ": array needs to be of rank 1, but is of rank " + arrayDimensions.length);
+        }
+    }
+
+    private void checkRank1(final long[] arrayDimensions, final String objectPath)
+    {
+        if (arrayDimensions.length != 1)
+        {
+            throw new HDF5JavaException("Dataset " + objectPath
+                    + ": array needs to be of rank 1, but is of rank " + arrayDimensions.length);
+        }
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public String getAttr(final String objectPath, final String attributeName)
+    {
+        return getAttr(objectPath, attributeName, true);
+    }
+
+    @Override
+    public String getAttr(final String objectPath, final String attributeName,
+            final boolean resolveName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final int dataTypeId =
+                            baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+                    checkReference(dataTypeId, objectPath);
+                    final long[] reference =
+                            baseReader.h5.readAttributeAsLongArray(attributeId, dataTypeId, 1);
+                    return resolveName ? baseReader.h5.getReferencedObjectName(attributeId,
+                            reference[0]) : refToStr(reference[0]);
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public String[] getArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        return getArrayAttr(objectPath, attributeName, true);
+    }
+
+    @Override
+    public String[] getArrayAttr(final String objectPath,
+            final String attributeName, final boolean resolveName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String[]> getAttributeRunnable =
+                new ICallableWithCleanUp<String[]>()
+                    {
+                        @Override
+                        public String[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+                            final int attributeTypeId =
+                                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+                            final int memoryTypeId;
+                            final int len;
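+                            // The attribute may store references either as an H5T_ARRAY of
+                            // references or as a one-dimensional data space of scalar
+                            // references; both cases are handled below.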
+                            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+                            {
+                                final int baseDataTypeId =
+                                        baseReader.h5.getBaseDataType(attributeTypeId, registry);
+                                checkReference(baseDataTypeId, objectPath);
+                                final int[] arrayDimensions =
+                                        baseReader.h5.getArrayDimensions(attributeTypeId);
+                                checkRank1(arrayDimensions, objectPath);
+                                len = arrayDimensions[0];
+                                memoryTypeId =
+                                        baseReader.h5.createArrayType(H5T_STD_REF_OBJ, len,
+                                                registry);
+                            } else
+                            {
+                                checkReference(attributeTypeId, objectPath);
+                                final long[] arrayDimensions =
+                                        baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                                                registry);
+                                checkRank1(arrayDimensions, objectPath);
+                                memoryTypeId = H5T_STD_REF_OBJ;
+                                len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+                            }
+                            final long[] references =
+                                    baseReader.h5.readAttributeAsLongArray(attributeId,
+                                            memoryTypeId, len);
+                            return resolveName ? baseReader.h5.getReferencedObjectNames(
+                                    attributeId, references) : refToStr(references);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDArray<String> getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        return getMDArrayAttr(objectPath, attributeName, true);
+    }
+
+    @Override
+    public MDArray<String> getMDArrayAttr(final String objectPath,
+            final String attributeName, final boolean resolveName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDArray<String>> getAttributeRunnable =
+                new ICallableWithCleanUp<MDArray<String>>()
+                    {
+                        @Override
+                        public MDArray<String> call(ICleanUpRegistry registry)
+                        {
+                            try
+                            {
+                                final int objectId =
+                                        baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                                registry);
+                                final int attributeId =
+                                        baseReader.h5.openAttribute(objectId, attributeName,
+                                                registry);
+                                final int attributeTypeId =
+                                        baseReader.h5
+                                                .getDataTypeForAttribute(attributeId, registry);
+                                final int memoryTypeId;
+                                final int[] arrayDimensions;
+                                if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+                                {
+                                    final int baseDataTypeId =
+                                            baseReader.h5
+                                                    .getBaseDataType(attributeTypeId, registry);
+                                    checkReference(baseDataTypeId, objectPath);
+                                    arrayDimensions =
+                                            baseReader.h5.getArrayDimensions(attributeTypeId);
+                                    memoryTypeId =
+                                            baseReader.h5.createArrayType(H5T_STD_REF_OBJ,
+                                                    arrayDimensions, registry);
+                                } else
+                                {
+                                    checkReference(attributeTypeId, objectPath);
+                                    arrayDimensions =
+                                            MDAbstractArray.toInt(baseReader.h5
+                                                    .getDataDimensionsForAttribute(attributeId,
+                                                            registry));
+                                    memoryTypeId = H5T_STD_REF_OBJ;
+                                }
+                                final int len;
+                                len = MDAbstractArray.getLength(arrayDimensions);
+                                final long[] references =
+                                        baseReader.h5.readAttributeAsLongArray(attributeId,
+                                                memoryTypeId, len);
+                                return new MDArray<String>(
+                                        resolveName ? baseReader.h5.getReferencedObjectNames(
+                                                attributeId, references) : refToStr(references),
+                                        arrayDimensions);
+                            } catch (IllegalArgumentException ex)
+                            {
+                                throw new HDF5JavaException(ex.getMessage());
+                            }
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public String read(final String objectPath)
+    {
+        return read(objectPath, true);
+    }
+
+    @Override
+    public String read(final String objectPath, final boolean resolveName)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int objectReferenceDataTypeId =
+                            baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                    checkReference(objectReferenceDataTypeId, objectPath);
+                    final long[] reference = new long[1];
+                    baseReader.h5.readDataSet(dataSetId, objectReferenceDataTypeId, reference);
+                    return resolveName ? baseReader.h5.getReferencedObjectName(dataSetId,
+                            reference[0]) : refToStr(reference[0]);
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public String[] readArray(final String objectPath)
+    {
+        return readArray(objectPath, true);
+    }
+
+    @Override
+    public String[] readArray(final String objectPath, final boolean resolveName)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String[]> readCallable = new ICallableWithCleanUp<String[]>()
+            {
+                @Override
+                public String[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                    final long[] references;
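+                    // A data set can hold references either directly (class H5T_REFERENCE)
+                    // or as a scalar data set whose type is an array of references; both
+                    // cases are handled below.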
+                    if (baseReader.h5.getClassType(dataTypeId) == H5T_REFERENCE)
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, registry);
+                        checkRank1(spaceParams.dimensions, objectPath);
+                        references = new long[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, dataTypeId, spaceParams.memorySpaceId,
+                                spaceParams.dataSpaceId, references);
+                    } else if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY
+                            && baseReader.h5.getClassType(baseReader.h5.getBaseDataType(dataTypeId,
+                                    registry)) == H5T_REFERENCE)
+                    {
+                        final int spaceId = baseReader.h5.createScalarDataSpace();
+                        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+                        checkRank1(dimensions, objectPath);
+                        final int len = dimensions[0];
+                        references = new long[len];
+                        final int memoryTypeId =
+                                baseReader.h5.createArrayType(H5T_STD_REF_OBJ, len, registry);
+                        baseReader.h5.readDataSet(dataSetId, memoryTypeId, spaceId, spaceId,
+                                references);
+                    } else
+                    {
+                        throw new HDF5JavaException("Dataset " + objectPath
+                                + " is not a reference.");
+                    }
+                    return resolveName ? baseReader.h5.getReferencedObjectNames(baseReader.fileId,
+                            references) : refToStr(references);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
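+    // Note: block numbers are given in units of the block size, i.e. block b starts at
+    // element offset b * blockSize.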
+    @Override
+    public String[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber
+                * blockSize, true);
+    }
+
+    @Override
+    public String[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber, final boolean resolveName)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber
+                * blockSize, resolveName);
+    }
+
+    @Override
+    public String[] readArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, offset, true);
+    }
+
+    @Override
+    public String[] readArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset, final boolean resolveName)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String[]> readCallable = new ICallableWithCleanUp<String[]>()
+            {
+                @Override
+                public String[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final long[] references = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_STD_REF_OBJ,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, references);
+                    return resolveName ? baseReader.h5.getReferencedObjectNames(baseReader.fileId,
+                            references) : refToStr(references);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public MDArray<String> readMDArray(final String objectPath)
+    {
+        return readMDArray(objectPath, true);
+    }
+
+    @Override
+    public MDArray<String> readMDArray(final String objectPath,
+            final boolean resolveName)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDArray<String>> readCallable =
+                new ICallableWithCleanUp<MDArray<String>>()
+                    {
+                        @Override
+                        public MDArray<String> call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final int dataTypeId =
+                                    baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                            final long[] references;
+                            final int[] dimensions;
+                            if (baseReader.h5.getClassType(dataTypeId) == H5T_REFERENCE)
+                            {
+                                final DataSpaceParameters spaceParams =
+                                        baseReader.getSpaceParameters(dataSetId, registry);
+                                dimensions = MDAbstractArray.toInt(spaceParams.dimensions);
+                                references = new long[spaceParams.blockSize];
+                                baseReader.h5.readDataSet(dataSetId, dataTypeId,
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                        references);
+                            } else if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY
+                                    && baseReader.h5.getClassType(baseReader.h5.getBaseDataType(
+                                            dataTypeId, registry)) == H5T_REFERENCE)
+                            {
+                                final int spaceId = baseReader.h5.createScalarDataSpace();
+                                dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+                                final int len = MDAbstractArray.getLength(dimensions);
+                                references = new long[len];
+                                final int memoryTypeId =
+                                        baseReader.h5.createArrayType(H5T_STD_REF_OBJ, len,
+                                                registry);
+                                baseReader.h5.readDataSet(dataSetId, memoryTypeId, spaceId,
+                                        spaceId, references);
+                            } else
+                            {
+                                throw new HDF5JavaException("Dataset " + objectPath
+                                        + " is not an object reference data set.");
+                            }
+                            final String[] referencedObjectNames =
+                                    resolveName ? baseReader.h5.getReferencedObjectNames(
+                                            baseReader.fileId, references) : refToStr(references);
+                            return new MDArray<String>(referencedObjectNames, dimensions);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber)
+    {
+        return readMDArrayBlock(objectPath, blockDimensions, blockNumber, true);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber, final boolean resolveName)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset,
+                resolveName);
+    }
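+
+    // The offset of a block is its block number scaled by the block extent
+    // per dimension; e.g. blockDimensions { 10, 20 } with blockNumber { 2, 3 }
+    // addresses the block starting at offset { 20, 60 }.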
+
+    @Override
+    public MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset, true);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset, final boolean resolveName)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDArray<String>> readCallable =
+                new ICallableWithCleanUp<MDArray<String>>()
+                    {
+                        @Override
+                        public MDArray<String> call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offset,
+                                            blockDimensions, registry);
+                            final long[] referencesBlock = new long[spaceParams.blockSize];
+                            baseReader.h5.readDataSet(dataSetId, H5T_STD_REF_OBJ,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                    referencesBlock);
+                            final String[] referencedObjectNamesBlock =
+                                    resolveName ? baseReader.h5.getReferencedObjectNames(
+                                            baseReader.fileId, referencesBlock)
+                                            : refToStr(referencesBlock);
+                            return new MDArray<String>(referencedObjectNamesBlock, blockDimensions);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(
+            final String dataSetPath) throws HDF5JavaException
+    {
+        return getArrayNaturalBlocks(dataSetPath, true);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(
+            final String dataSetPath, final boolean resolveName) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<String[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<String[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<String[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<String[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final String[] referencesBlock =
+                                        readArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset, resolveName);
+                                return new HDF5DataBlock<String[]>(referencesBlock,
+                                        index.getAndIncIndex(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
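+
+    // Illustrative iteration over the natural (chunk-sized) blocks of a 1D
+    // reference data set; "refReader", the path and process() are assumptions
+    // for the sketch:
+    //
+    //     for (HDF5DataBlock<String[]> block : refReader.getArrayNaturalBlocks("/refs"))
+    //     {
+    //         process(block.getData(), block.getIndex(), block.getOffset());
+    //     }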
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(
+            final String dataSetPath)
+    {
+        return getMDArrayNaturalBlocks(dataSetPath, true);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(
+            final String dataSetPath, final boolean resolveName)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDArray<String>>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDArray<String>>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDArray<String>>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDArray<String>> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDArray<String> data =
+                                        readMDArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset, resolveName);
+                                return new HDF5MDDataBlock<MDArray<String>>(data,
+                                        index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ReferenceWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ReferenceWriter.java
new file mode 100644
index 0000000..50fe5f6
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ReferenceWriter.java
@@ -0,0 +1,515 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5BaseReader.REFERENCE_SIZE_IN_BYTES;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_REF_OBJ;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5ReferenceWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5ReferenceWriter extends HDF5ReferenceReader implements IHDF5ReferenceWriter
+{
+
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5ReferenceWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name,
+            final String referencedObjectPath)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert referencedObjectPath != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            final byte[] reference =
+                                    baseWriter.h5.createObjectReference(baseWriter.fileId,
+                                            referencedObjectPath);
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_REF_OBJ,
+                                        H5T_STD_REF_OBJ, dataSpaceId, reference, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_REF_OBJ,
+                                        H5T_STD_REF_OBJ, -1, reference, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
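+
+    // Illustrative usage; "refWriter" and the paths are assumptions for the
+    // sketch: attach an attribute "source" to /group that references /ds.
+    //
+    //     refWriter.setAttr("/group", "source", "/ds");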
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final String[] referencedObjectPaths)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert referencedObjectPaths != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] references =
+                            baseWriter.h5.createObjectReferences(baseWriter.fileId,
+                                    referencedObjectPaths);
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { references.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_REF_OBJ, H5T_STD_REF_OBJ,
+                                dataSpaceId, references);
+                    } else
+                    {
+                        final int typeId =
+                                baseWriter.h5.createArrayType(H5T_STD_REF_OBJ,
+                                        referencedObjectPaths.length, registry);
+                        baseWriter.setAttribute(objectPath, name, typeId, typeId, -1, references);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDArray<String> referencedObjectPaths)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert referencedObjectPaths != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] references =
+                            baseWriter.h5.createObjectReferences(baseWriter.fileId,
+                                    referencedObjectPaths.getAsFlatArray());
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(
+                                        referencedObjectPaths.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_REF_OBJ, H5T_STD_REF_OBJ,
+                                dataSpaceId, references);
+                    } else
+                    {
+                        final int typeId =
+                                baseWriter.h5.createArrayType(H5T_STD_REF_OBJ,
+                                        referencedObjectPaths.dimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, typeId, typeId, -1, references);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(String objectPath, String referencedObjectPath)
+    {
+        assert objectPath != null;
+        assert referencedObjectPath != null;
+
+        baseWriter.checkOpen();
+        final byte[] reference =
+                baseWriter.h5.createObjectReference(baseWriter.fileId, referencedObjectPath);
+        baseWriter.writeScalar(objectPath, H5T_STD_REF_OBJ, H5T_STD_REF_OBJ, reference);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String[] referencedObjectPaths)
+    {
+        writeArray(objectPath, referencedObjectPaths, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String[] referencedObjectPaths,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert referencedObjectPaths != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] references =
+                            baseWriter.h5.createObjectReferences(baseWriter.fileId,
+                                    referencedObjectPaths);
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_REF_OBJ, new long[]
+                                { referencedObjectPaths.length }, REFERENCE_SIZE_IN_BYTES,
+                                    features, registry);
+                    H5Dwrite(dataSetId, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, references);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
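+
+    // Illustrative usage; "refWriter" and the paths are assumptions for the
+    // sketch: store references to two existing data sets under /refs.
+    //
+    //     refWriter.writeArray("/refs", new String[] { "/ds1", "/ds2" });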
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
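+                    // A chunked layout starts with extent 0 and grows on
+                    // demand, using "size" as the chunk size; otherwise the
+                    // data set is allocated at its final extent directly.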
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_REF_OBJ, features, new long[]
+                            { 0 }, new long[]
+                            { size }, REFERENCE_SIZE_IN_BYTES, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_REF_OBJ, features, new long[]
+                            { size }, null, REFERENCE_SIZE_IN_BYTES, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_REF_OBJ, features, new long[]
+                        { size }, new long[]
+                        { blockSize }, REFERENCE_SIZE_IN_BYTES, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final String[] referencedObjectPaths,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, referencedObjectPaths, referencedObjectPaths.length,
+                referencedObjectPaths.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath,
+            final String[] referencedObjectPaths, final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert referencedObjectPaths != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    final long[] references =
+                            baseWriter.h5.createObjectReferences(baseWriter.fileId,
+                                    referencedObjectPaths);
+                    H5Dwrite(dataSetId, H5T_STD_REF_OBJ, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            references);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
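+
+    // Illustrative usage; "refWriter" and the paths are assumptions for the
+    // sketch: create an extendable reference array with block size 100, then
+    // append two blocks (each String[] holding 100 object paths).
+    //
+    //     refWriter.createArray("/refs", 0L, 100);
+    //     refWriter.writeArrayBlock("/refs", block0, 0L);
+    //     refWriter.writeArrayBlock("/refs", block1, 1L);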
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<String> referencedObjectPaths)
+    {
+        writeMDArray(objectPath, referencedObjectPaths, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<String> referencedObjectPaths,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert referencedObjectPaths != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] references =
+                            baseWriter.h5.createObjectReferences(baseWriter.fileId,
+                                    referencedObjectPaths.getAsFlatArray());
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_REF_OBJ,
+                                    referencedObjectPaths.longDimensions(),
+                                    REFERENCE_SIZE_IN_BYTES, features, registry);
+                    H5Dwrite(dataSetId, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, references);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_STD_REF_OBJ, features,
+                                nullDimensions, MDAbstractArray.toLong(dimensions),
+                                REFERENCE_SIZE_IN_BYTES, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_REF_OBJ, features,
+                                MDAbstractArray.toLong(dimensions), null, REFERENCE_SIZE_IN_BYTES,
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_REF_OBJ, features, dimensions,
+                            MDAbstractArray.toLong(blockDimensions), REFERENCE_SIZE_IN_BYTES,
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = referencedObjectPaths.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, referencedObjectPaths, offset);
+    }
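+
+    // As in the reader, a block's offset is blockNumber[i] * dimensions[i]
+    // per dimension, so a { 5, 5 } block with blockNumber { 1, 2 } starts at
+    // offset { 5, 10 }.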
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final long[] offset)
+    {
+        assert objectPath != null;
+        assert referencedObjectPaths != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
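+                    // The data set must span offset + block extent in every
+                    // dimension, so it is extended first; then the file-side
+                    // hyperslab is selected and the references are written.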
+                    final long[] dimensions = referencedObjectPaths.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    final long[] references =
+                            baseWriter.h5.createObjectReferences(baseWriter.fileId,
+                                    referencedObjectPaths.getAsFlatArray());
+                    H5Dwrite(dataSetId, H5T_STD_REF_OBJ, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            references);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
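+        // Note: unlike the String-based overloads, this variant takes already
+        // materialized reference values (as longs) and writes them verbatim.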
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDAbstractArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId,
+                            MDAbstractArray.toLong(memoryOffset), longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_STD_REF_OBJ, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ShortReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ShortReader.java
new file mode 100644
index 0000000..7f91e8b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ShortReader.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT16;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5ShortReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5ShortReader implements IHDF5ShortReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5ShortReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public short getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Short> getAttributeRunnable = new ICallableWithCleanUp<Short>()
+            {
+                @Override
+                public Short call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final short[] data =
+                            baseReader.h5.readAttributeAsShortArray(attributeId, H5T_NATIVE_INT16, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
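+
+    // Illustrative usage; the reader handle, object path and attribute name
+    // are assumptions for the sketch:
+    //
+    //     short version = reader.int16().getAttr("/group", "version");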
+
+    @Override
+    public short[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<short[]> getAttributeRunnable =
+                new ICallableWithCleanUp<short[]>()
+                    {
+                        @Override
+                        public short[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getShortArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDShortArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDShortArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDShortArray>()
+                    {
+                        @Override
+                        public MDShortArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getShortMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public short[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDShortArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public short read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Short> readCallable = new ICallableWithCleanUp<Short>()
+            {
+                @Override
+                public Short call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final short[] data = new short[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT16, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public short[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<short[]> readCallable = new ICallableWithCleanUp<short[]>()
+            {
+                @Override
+                public short[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readShortArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private short[] readShortArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final short[] data = new short[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT16, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readShortArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
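+    // Fallback for data sets whose element type is itself an H5T_ARRAY: such
+    // a data set is scalar in file space, so it is read through a scalar data
+    // space with an in-memory array type of matching length.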
+    private short[] readShortArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final short[] data = new short[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT16, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDShortArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT16, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDShortArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT16, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public short[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public short[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<short[]> readCallable = new ICallableWithCleanUp<short[]>()
+            {
+                @Override
+                public short[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final short[] data = new short[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT16, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public short[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDShortArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public short[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDShortArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public short[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDShortArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDShortArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDShortArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
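+
+    // Illustrative usage, assuming a reader handle, a 2D data set "/mat" and
+    // IndexMap's fluent bind(): fixing index 1 of dimension 0 yields one row
+    // as a rank-1 slice.
+    //
+    //     MDShortArray row = reader.int16().readMDArraySlice("/mat",
+    //             new IndexMap().bind(0, 1));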
+
+    @Override
+    public MDShortArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDShortArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDShortArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDShortArray> readCallable = new ICallableWithCleanUp<MDShortArray>()
+            {
+                @Override
+                public MDShortArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readShortMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDShortArray readShortMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final short[] data = new short[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT16, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDShortArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readShortMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDShortArray readShortMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT16, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final short[] data = new short[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDShortArray(data, arrayDimensions);
+        } else
+        {
+            final short[] data =
+                    new short[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDShortArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDShortArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDShortArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDShortArray> readCallable = new ICallableWithCleanUp<MDShortArray>()
+            {
+                @Override
+                public MDShortArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final short[] dataBlock = new short[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT16,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDShortArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
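+    // The HDF5SpaceRankMismatch fallback above covers data sets whose element type
+    // is itself an HDF5 array type: the rank of the data space is smaller than the
+    // requested rank, and the missing dimensions come from the array type. A hedged
+    // sketch (hypothetical path, rank-1 data set of short[3] elements):
+    //
+    //   MDShortArray block = reader.readMDArrayBlockWithOffset(
+    //           "/dsOfArrays", new int[] { 2, 3 }, new long[] { 0, 0 });
+    //   // -> rank-2 result: dimension 0 indexes the data set, dimension 1 the
+    //   //    array type, which must always be read completely (see below).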
+    private MDShortArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array types is not supported: check that the
+        // requested block covers each array dimension completely (filling in
+        // unspecified dimensions) and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final short[] dataBlock =
+                new short[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_INT16, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDShortArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<short[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<short[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<short[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<short[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<short[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final short[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<short[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
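+
+    // Iteration sketch for the natural (storage-layout-sized) blocks produced
+    // above; the path and the process() consumer are hypothetical:
+    //
+    //   for (HDF5DataBlock<short[]> block : reader.getArrayNaturalBlocks("/a"))
+    //   {
+    //       process(block.getData(), block.getOffset());
+    //   }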
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDShortArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDShortArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDShortArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDShortArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDShortArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDShortArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDShortArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    short[] getShortArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_INT16, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_INT16;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final short[] data =
+                baseReader.h5.readAttributeAsShortArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
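+
+    // Note on the branch above: a short[] attribute may be stored either as an
+    // HDF5 array type on a scalar data space or as a simple rank-1 data space of
+    // scalar shorts; both storage forms are read back into the same Java short[].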
+
+    MDShortArray getShortMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_INT16,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_INT16;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final short[] data =
+                    baseReader.h5.readAttributeAsShortArray(attributeId,
+                            memoryTypeId, len);
+            return new MDShortArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ShortWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ShortWriter.java
new file mode 100644
index 0000000..6cad2d2
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ShortWriter.java
@@ -0,0 +1,703 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT16;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I16LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U16LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5ShortWriter}.
+ * 
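+ * <p>
+ * A minimal usage sketch (assuming the enclosing {@link IHDF5Writer} facade exposes
+ * this implementation, e.g. via {@code writer.int16()}; file and data set names are
+ * hypothetical):
+ * 
+ * <pre>
+ * IHDF5Writer writer = HDF5Factory.open("example.h5");
+ * writer.int16().writeArray("/mygroup/myarray", new short[] { 1, 2, 3 });
+ * writer.close();
+ * </pre>
+ * 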
+ * @author Bernd Rinn
+ */
+class HDF5ShortWriter extends HDF5ShortReader implements IHDF5ShortWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5ShortWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final short value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I16LE,
+                                        H5T_NATIVE_INT16, dataSpaceId, new short[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_I16LE,
+                                        H5T_NATIVE_INT16, -1, new short[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final short[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I16LE, H5T_NATIVE_INT16,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT16, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I16LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDShortArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_I16LE, H5T_NATIVE_INT16,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT16, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I16LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final short[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDShortArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final short value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_I16LE, H5T_NATIVE_INT16, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final short[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final short[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, new long[]
+                                { data.length }, 2, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT16, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                            features, new long[] { 0 }, new long[] { size }, 2, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                            features, new long[] { size }, null, 2, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
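+
+    // Layout decision above, sketched: features that require chunking create an
+    // empty, extendable data set whose chunk size is the requested size; otherwise
+    // a fixed-size data set of the requested length is allocated up front.
+    // Hypothetical paths, assuming the INT_CHUNKED feature constant:
+    //
+    //   writer.createArray("/fixed", 100);                    // fixed size 100
+    //   writer.createArray("/growable", 100, INT_CHUNKED);    // extendable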
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 2, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final short[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
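+
+    // Worked example of the block arithmetic above (hypothetical path): with
+    // data.length == 100 and blockNumber == 3, the block lands at offset 300. Every
+    // block written this way must therefore have the full block size; a trailing,
+    // partial block needs writeArrayBlockWithOffset() directly:
+    //
+    //   writer.writeArrayBlock("/a", block, 3);                 // elements [300, 400)
+    //   writer.writeArrayBlockWithOffset("/a", tail, 40, 400);  // elements [400, 440)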
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final short[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT16, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
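+
+    // Note on the extension semantics above: openAndExtendDataSet() grows the data
+    // set to at least offset + dataSize before the file hyperslab is selected, so
+    // blocks may be written beyond the current end of an extendable data set.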
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
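+     * <p>
+     * A sketch of the expected shape (hypothetical path and values):
+     * 
+     * <pre>
+     * short[][] matrix = new short[][] { { 1, 2 }, { 3, 4 } };
+     * writer.writeMatrix("/m", matrix); // stored as a 2 x 2 data set
+     * </pre>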
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final short[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final short[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDShortArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final short[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDShortArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final short[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final short[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDShortArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDShortArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDShortArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDShortArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                                    data.longDimensions(), 2, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT16, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 2, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                                features, MDArray.toLong(dimensions), null, 2, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 2, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDShortArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDShortArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDShortArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDShortArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT16, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDShortArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT16, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
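+
+    // Memory-offset sketch for the method above (hypothetical sizes): write the
+    // 2 x 2 sub-block that starts at in-memory position (1, 1) of a 4 x 4
+    // MDShortArray to file offset (10, 10):
+    //
+    //   writer.writeMDArrayBlockWithOffset("/m", mem4x4,
+    //           new int[] { 2, 2 }, new long[] { 10, 10 }, new int[] { 1, 1 });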
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5StorageLayout.java b/source/java/ch/systemsx/cisd/hdf5/HDF5StorageLayout.java
new file mode 100644
index 0000000..da691f7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5StorageLayout.java
@@ -0,0 +1,33 @@
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The storage layout of a data set in the HDF5 file. Not applicable for attributes.
+ * 
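+ * <p>
+ * A package-internal lookup sketch (the numeric id comes from the C layer):
+ * 
+ * <pre>
+ * HDF5StorageLayout layout = HDF5StorageLayout.fromId(HDF5Constants.H5D_CHUNKED);
+ * // layout == HDF5StorageLayout.CHUNKED
+ * </pre>
+ * 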
+ * @author Bernd Rinn
+ */
+public enum HDF5StorageLayout
+{
+    COMPACT(HDF5Constants.H5D_COMPACT), CONTIGUOUS(HDF5Constants.H5D_CONTIGUOUS), CHUNKED(
+            HDF5Constants.H5D_CHUNKED), NOT_APPLICABLE(-1);
+
+    private final int id;
+
+    private HDF5StorageLayout(int id)
+    {
+        this.id = id;
+    }
+
+    static HDF5StorageLayout fromId(int id) throws IllegalArgumentException
+    {
+        for (HDF5StorageLayout layout : values())
+        {
+            if (layout.id == id)
+            {
+                return layout;
+            }
+        }
+        throw new IllegalArgumentException("Illegal layout id " + id);
+    }
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5StringReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5StringReader.java
new file mode 100644
index 0000000..adaffba
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5StringReader.java
@@ -0,0 +1,677 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5Utils.getOneDimensionalArraySize;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STRING;
+
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5StringReader}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5StringReader implements IHDF5StringReader
+{
+
+    private final HDF5BaseReader baseReader;
+
+    HDF5StringReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    //
+    // Attributes
+    //
+
+    @Override
+    public String getAttr(final String objectPath, final String attributeName)
+    {
+        return getStringAttribute(objectPath, attributeName, false);
+    }
+
+    @Override
+    public String getAttrRaw(final String objectPath, final String attributeName)
+    {
+        return getStringAttribute(objectPath, attributeName, true);
+    }
+
+    String getStringAttribute(final String objectPath, final String attributeName,
+            final boolean readRaw)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    return baseReader.getStringAttribute(objectId, objectPath, attributeName,
+                            readRaw, registry);
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public String[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        return getStringArrayAttribute(objectPath, attributeName, false);
+    }
+
+    @Override
+    public String[] getArrayAttrRaw(final String objectPath, final String attributeName)
+    {
+        return getStringArrayAttribute(objectPath, attributeName, true);
+    }
+
+    String[] getStringArrayAttribute(final String objectPath, final String attributeName,
+            final boolean readRaw)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String[]> readRunnable = new ICallableWithCleanUp<String[]>()
+            {
+                @Override
+                public String[] call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    return baseReader.getStringArrayAttribute(objectId, objectPath, attributeName,
+                            readRaw, registry);
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public MDArray<String> getMDArrayAttr(final String objectPath, final String attributeName)
+    {
+        return getStringMDArrayAttribute(objectPath, attributeName, false);
+    }
+
+    @Override
+    public MDArray<String> getMDArrayAttrRaw(final String objectPath, final String attributeName)
+    {
+        return getStringMDArrayAttribute(objectPath, attributeName, true);
+    }
+
+    MDArray<String> getStringMDArrayAttribute(final String objectPath, final String attributeName,
+            final boolean readRaw)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDArray<String>> readRunnable =
+                new ICallableWithCleanUp<MDArray<String>>()
+                    {
+                        @Override
+                        public MDArray<String> call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return baseReader.getStringMDArrayAttribute(objectId, objectPath,
+                                    attributeName, readRaw, registry);
+                        }
+                    };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    //
+    // Data Sets
+    //
+
+    @Override
+    public String read(final String objectPath) throws HDF5JavaException
+    {
+        return readString(objectPath, false);
+    }
+
+    @Override
+    public String readRaw(String objectPath) throws HDF5JavaException
+    {
+        return readString(objectPath, true);
+    }
+
+    String readString(final String objectPath, final boolean readRaw) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String> readRunnable = new ICallableWithCleanUp<String>()
+            {
+                @Override
+                public String call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int dataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    final boolean isString = (baseReader.h5.getClassType(dataTypeId) == H5T_STRING);
+                    if (isString == false)
+                    {
+                        throw new HDF5JavaException(objectPath + " needs to be a String.");
+                    }
+                    if (baseReader.h5.isVariableLengthString(dataTypeId))
+                    {
+                        String[] data = new String[1];
+                        baseReader.h5.readDataSetVL(dataSetId, dataTypeId, data);
+                        return data[0];
+                    } else
+                    {
+                        final int size = baseReader.h5.getDataTypeSize(dataTypeId);
+                        final CharacterEncoding encoding =
+                                baseReader.h5.getCharacterEncoding(dataTypeId);
+                        byte[] data = new byte[size];
+                        baseReader.h5.readDataSetNonNumeric(dataSetId, dataTypeId, data);
+                        return readRaw ? StringUtils.fromBytes(data, encoding) : StringUtils
+                                .fromBytes0Term(data, encoding);
+                    }
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
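+
+    // Hedged sketch of the raw vs. zero-terminated distinction implemented above
+    // (hypothetical data set "/s", stored as a fixed-length 4-byte string holding
+    // "ab\0\0"):
+    //
+    //   read("/s")     // -> "ab"      (cut off at the first 0 byte)
+    //   readRaw("/s")  // -> "ab\0\0"  (full storage size, padding kept)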
+
+    @Override
+    public String[] readArrayRaw(final String objectPath) throws HDF5JavaException
+    {
+        return readStringArray(objectPath, true);
+    }
+
+    @Override
+    public String[] readArray(final String objectPath) throws HDF5JavaException
+    {
+        return readStringArray(objectPath, false);
+    }
+
+    String[] readStringArray(final String objectPath, final boolean readRaw)
+            throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String[]> readRunnable = new ICallableWithCleanUp<String[]>()
+            {
+                @Override
+                public String[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final long[] dimensions = baseReader.h5.getDataDimensions(dataSetId, registry);
+                    final int oneDimSize = getOneDimensionalArraySize(dimensions);
+                    final String[] data = new String[oneDimSize];
+                    final int dataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    if (baseReader.h5.isVariableLengthString(dataTypeId))
+                    {
+                        baseReader.h5.readDataSetVL(dataSetId, dataTypeId, data);
+                    } else
+                    {
+                        final boolean isString =
+                                (baseReader.h5.getClassType(dataTypeId) == H5T_STRING);
+                        if (isString == false)
+                        {
+                            throw new HDF5JavaException(objectPath + " needs to be a String.");
+                        }
+                        final int strLength;
+                        final byte[] bdata;
+                        if (readRaw)
+                        {
+                            strLength = baseReader.h5.getDataTypeSize(dataTypeId);
+                            bdata = new byte[oneDimSize * strLength];
+                            baseReader.h5.readDataSetNonNumeric(dataSetId, dataTypeId, bdata);
+                        } else
+                        {
+                            strLength = -1;
+                            bdata = null;
+                            baseReader.h5.readDataSetString(dataSetId, dataTypeId, data);
+                        }
+                        if (bdata != null && readRaw)
+                        {
+                            final CharacterEncoding encoding =
+                                    baseReader.h5.getCharacterEncoding(dataTypeId);
+                            for (int i = 0, startIdx = 0; i < oneDimSize; ++i, startIdx +=
+                                    strLength)
+                            {
+                                data[i] =
+                                        StringUtils.fromBytes(bdata, startIdx,
+                                                startIdx + strLength, encoding);
+                            }
+                        }
+                    }
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readRunnable);
+    }
+
+    @Override
+    public String[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockSize * blockNumber);
+    }
+
+    @Override
+    public String[] readArrayBlockRaw(String objectPath, int blockSize, long blockNumber)
+    {
+        return readArrayBlockWithOffsetRaw(objectPath, blockSize, blockSize * blockNumber);
+    }
+
+    String[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset, final boolean readRaw)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<String[]> readCallable = new ICallableWithCleanUp<String[]>()
+            {
+                @Override
+                public String[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final String[] data = new String[spaceParams.blockSize];
+                    final int dataTypeId =
+                            baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                    if (baseReader.h5.isVariableLengthString(dataTypeId))
+                    {
+                        baseReader.h5.readDataSetVL(dataSetId, dataTypeId,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                    } else
+                    {
+                        final boolean isString =
+                                (baseReader.h5.getClassType(dataTypeId) == H5T_STRING);
+                        if (isString == false)
+                        {
+                            throw new HDF5JavaException(objectPath + " needs to be a String.");
+                        }
+
+                        final int strLength;
+                        final byte[] bdata;
+                        if (readRaw)
+                        {
+                            strLength = baseReader.h5.getDataTypeSize(dataTypeId);
+                            bdata = new byte[spaceParams.blockSize * strLength];
+                            baseReader.h5.readDataSetNonNumeric(dataSetId, dataTypeId,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId, bdata);
+                        } else
+                        {
+                            strLength = -1;
+                            bdata = null;
+                            baseReader.h5.readDataSetString(dataSetId, dataTypeId,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                        }
+                        if (bdata != null && readRaw)
+                        {
+                            final CharacterEncoding encoding =
+                                    baseReader.h5.getCharacterEncoding(dataTypeId);
+                            for (int i = 0, startIdx = 0; i < spaceParams.blockSize; ++i, startIdx +=
+                                    strLength)
+                            {
+                                data[i] =
+                                        StringUtils.fromBytes(bdata, startIdx,
+                                                startIdx + strLength, encoding);
+                            }
+                        }
+                    }
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public String[] readArrayBlockWithOffset(String objectPath, int blockSize, long offset)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, offset, false);
+    }
+
+    @Override
+    public String[] readArrayBlockWithOffsetRaw(String objectPath, int blockSize, long offset)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, offset, true);
+    }
+
+    @Override
+    public MDArray<String> readMDArray(final String objectPath)
+    {
+        return readStringMDArray(objectPath, false);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayRaw(final String objectPath)
+    {
+        return readStringMDArray(objectPath, true);
+    }
+
+    MDArray<String> readStringMDArray(final String objectPath, final boolean readRaw)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDArray<String>> readCallable =
+                new ICallableWithCleanUp<MDArray<String>>()
+                    {
+                        @Override
+                        public MDArray<String> call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, registry);
+                            final String[] data = new String[spaceParams.blockSize];
+                            final int dataTypeId =
+                                    baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                            if (baseReader.h5.isVariableLengthString(dataTypeId))
+                            {
+                                baseReader.h5.readDataSetVL(dataSetId, dataTypeId,
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            } else
+                            {
+                                final boolean isString =
+                                        (baseReader.h5.getClassType(dataTypeId) == H5T_STRING);
+                                if (isString == false)
+                                {
+                                    throw new HDF5JavaException(objectPath
+                                            + " needs to be a String.");
+                                }
+
+                                final int strLength;
+                                final byte[] bdata;
+                                if (readRaw)
+                                {
+                                    strLength = baseReader.h5.getDataTypeSize(dataTypeId);
+                                    bdata = new byte[spaceParams.blockSize * strLength];
+                                    baseReader.h5.readDataSetNonNumeric(dataSetId, dataTypeId,
+                                            bdata);
+                                } else
+                                {
+                                    strLength = -1;
+                                    bdata = null;
+                                    baseReader.h5.readDataSetString(dataSetId, dataTypeId, data);
+                                }
+                                if (bdata != null && readRaw)
+                                {
+                                    final CharacterEncoding encoding =
+                                            baseReader.h5.getCharacterEncoding(dataTypeId);
+                                    for (int i = 0, startIdx = 0; i < spaceParams.blockSize;
+                                            ++i, startIdx += strLength)
+                                    {
+                                        data[i] =
+                                                StringUtils.fromBytes(bdata, startIdx, startIdx
+                                                        + strLength, encoding);
+                                    }
+                                }
+                            }
+                            return new MDArray<String>(data, spaceParams.dimensions);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset, final boolean readRaw)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDArray<String>> readCallable =
+                new ICallableWithCleanUp<MDArray<String>>()
+                    {
+                        @Override
+                        public MDArray<String> call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offset,
+                                            blockDimensions, registry);
+                            final String[] dataBlock = new String[spaceParams.blockSize];
+                            final int dataTypeId =
+                                    baseReader.h5.getNativeDataTypeForDataSet(dataSetId, registry);
+                            if (baseReader.h5.isVariableLengthString(dataTypeId))
+                            {
+                                baseReader.h5.readDataSetVL(dataSetId, dataTypeId,
+                                        spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                        dataBlock);
+                            } else
+                            {
+                                final boolean isString =
+                                        (baseReader.h5.getClassType(dataTypeId) == H5T_STRING);
+                                if (isString == false)
+                                {
+                                    throw new HDF5JavaException(objectPath
+                                            + " needs to be a String.");
+                                }
+
+                                final int strLength;
+                                byte[] bdata = null;
+                                if (readRaw)
+                                {
+                                    strLength = baseReader.h5.getDataTypeSize(dataTypeId);
+                                    bdata = new byte[spaceParams.blockSize * strLength];
+                                    baseReader.h5.readDataSetNonNumeric(dataSetId, dataTypeId,
+                                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                            bdata);
+                                } else
+                                {
+                                    strLength = -1;
+                                    baseReader.h5.readDataSetString(dataSetId, dataTypeId,
+                                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                            dataBlock);
+                                }
+                                if (bdata != null && readRaw)
+                                {
+                                    final CharacterEncoding encoding =
+                                            baseReader.h5.getCharacterEncoding(dataTypeId);
+                                    for (int i = 0, startIdx = 0; i < spaceParams.blockSize;
+                                            ++i, startIdx += strLength)
+                                    {
+                                        dataBlock[i] =
+                                                StringUtils.fromBytes(bdata, startIdx, startIdx
+                                                        + strLength, encoding);
+                                    }
+                                }
+                            }
+                            return new MDArray<String>(dataBlock, blockDimensions);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset, false);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlockWithOffsetRaw(String objectPath, int[] blockDimensions,
+            long[] offset)
+    {
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset, true);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDArray<String> readMDArrayBlockRaw(String objectPath, int[] blockDimensions,
+            long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffsetRaw(objectPath, blockDimensions, offset);
+    }
+
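+    // Example (illustrative; "stringReader" and the data set path are assumed): block
+    // number { 1, 2 } with block dimensions { 10, 10 } maps to offset { 10, 20 }, so
+    //
+    //   final MDArray<String> block =
+    //           stringReader.readMDArrayBlock("/ds", new int[] { 10, 10 }, new long[] { 1, 2 });
+    //
+    // reads rows 10..19 and columns 20..29.
+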
+    Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(final String dataSetPath,
+            final boolean readRaw) throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<String[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<String[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<String[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<String[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final String[] block =
+                                        readRaw ? readArrayBlockWithOffsetRaw(dataSetPath,
+                                                index.getBlockSize(), offset)
+                                                : readArrayBlockWithOffset(dataSetPath,
+                                                        index.getBlockSize(), offset);
+                                return new HDF5DataBlock<String[]>(block, index.getAndIncIndex(),
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        return getArrayNaturalBlocks(dataSetPath, false);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocksRaw(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return getArrayNaturalBlocks(dataSetPath, true);
+    }
+
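+    // Example (illustrative; "stringReader", "/ds" and "process" are assumed): natural
+    // blocks traverse the data set in the chunks of its storage layout, e.g.
+    //
+    //   for (HDF5DataBlock<String[]> block : stringReader.getArrayNaturalBlocks("/ds"))
+    //   {
+    //       process(block.getData(), block.getOffset());
+    //   }
+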
+    Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(final String objectPath,
+            final boolean readRaw)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(objectPath));
+
+        return new Iterable<HDF5MDDataBlock<MDArray<String>>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDArray<String>>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDArray<String>>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDArray<String>> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDArray<String> data =
+                                        readRaw ? readMDArrayBlockWithOffsetRaw(objectPath,
+                                                index.getBlockSize(), offset)
+                                                : readMDArrayBlockWithOffset(objectPath,
+                                                        index.getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDArray<String>>(data,
+                                        index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(
+            final String objectPath)
+    {
+        return getMDArrayNaturalBlocks(objectPath, false);
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocksRaw(String objectPath)
+    {
+        return getMDArrayNaturalBlocks(objectPath, true);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5StringWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5StringWriter.java
new file mode 100644
index 0000000..f0c2880
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5StringWriter.java
@@ -0,0 +1,964 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5DwriteString;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseWriter.StringArrayBuffer;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5StringWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5StringWriter extends HDF5StringReader implements IHDF5StringWriter
+{
+
+    // Upper limit for the compact storage layout: 64 KiB minus a small reserve.
+    private static final int MAX_COMPACT_SIZE = 64 * 1024 - 12;
+
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5StringWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttrVL(final String objectPath, final String name,
+            final String value)
+    {
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseWriter.h5.openObject(baseWriter.fileId, objectPath,
+                                            registry);
+                            baseWriter.setStringAttributeVariableLength(objectId, name, value,
+                                    registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final String value)
+    {
+        setStringAttribute(objectPath, name, value, value.length(), true);
+    }
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final String value,
+            final int maxLength)
+    {
+        setStringAttribute(objectPath, name, value, maxLength, false);
+    }
+
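+    // Example (illustrative; "stringWriter" and the paths are assumed): the two-argument
+    // form sizes the attribute to fit the value, while the form with maxLength reserves
+    // space for later, longer values:
+    //
+    //   stringWriter.setAttr("/ds", "unit", "mm");
+    //   stringWriter.setAttr("/ds", "comment", "draft", 64);
+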
+    void setStringAttribute(final String objectPath, final String name, final String value,
+            final int maxLength, final boolean lengthFitsValue)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseWriter.h5.openObject(baseWriter.fileId, objectPath,
+                                            registry);
+                            baseWriter.setStringAttribute(objectId, name, value, maxLength,
+                                    lengthFitsValue, registry);
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final String[] value)
+    {
+        setStringArrayAttribute(objectPath, name, value, -1, true);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final String[] value, final int maxLength)
+    {
+        setStringArrayAttribute(objectPath, name, value, maxLength, false);
+    }
+
+    void setStringArrayAttribute(final String objectPath, final String name, final String[] value,
+            final int maxLength, final boolean lengthFitsValue)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseWriter.h5.openObject(baseWriter.fileId, objectPath, registry);
+                    baseWriter.setStringArrayAttribute(objectId, name, value, maxLength,
+                            lengthFitsValue, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDArray<String> value)
+    {
+        setStringMDArrayAttribute(objectPath, name, value, -1, true);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDArray<String> value, final int maxLength)
+    {
+        setStringMDArrayAttribute(objectPath, name, value, maxLength, false);
+    }
+
+    void setStringMDArrayAttribute(final String objectPath, final String name,
+            final MDArray<String> value, final int maxLength, final boolean lengthFitsValue)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseWriter.h5.openObject(baseWriter.fileId, objectPath, registry);
+                    baseWriter.setStringArrayAttribute(objectId, name, value, maxLength,
+                            lengthFitsValue, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final String data, final int maxLength)
+    {
+        writeString(objectPath, data, maxLength, true,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void write(final String objectPath, final String data)
+    {
+        writeString(objectPath, data, data.length(), true,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void write(final String objectPath, final String data,
+            final HDF5GenericStorageFeatures features)
+    {
+        writeString(objectPath, data, data.length(), true, features);
+    }
+
+    @Override
+    public void write(final String objectPath, final String data, final int maxLength,
+            final HDF5GenericStorageFeatures features)
+    {
+        writeString(objectPath, data, maxLength, false, features);
+    }
+
+    // Implementation note: this needs special treatment as we want to create a (possibly chunked)
+    // data set with max dimension 1 instead of infinity.
+    void writeString(final String objectPath, final String data, final int maxLength,
+            final boolean lengthFitsValue, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final byte[] bytes;
+                    final int realMaxLengthInBytes;
+                    if (lengthFitsValue)
+                    {
+                        bytes = StringUtils.toBytes0Term(data, baseWriter.encodingForNewDataSets);
+                        realMaxLengthInBytes = (bytes.length == 1) ? 1 : bytes.length - 1;
+                    } else
+                    {
+                        bytes =
+                                StringUtils.toBytes0Term(data, maxLength,
+                                        baseWriter.encodingForNewDataSets);
+                        realMaxLengthInBytes =
+                                baseWriter.encodingForNewDataSets.getMaxBytesPerChar()
+                                        * ((maxLength == 0) ? 1 : maxLength);
+                    }
+
+                    boolean exists = baseWriter.h5.exists(baseWriter.fileId, objectPath);
+                    if (exists && baseWriter.keepDataIfExists(features) == false)
+                    {
+                        baseWriter.h5.deleteObject(baseWriter.fileId, objectPath);
+                        exists = false;
+                    }
+                    final int stringDataTypeId =
+                            baseWriter.h5.createDataTypeString(realMaxLengthInBytes, registry);
+                    if (features.requiresChunking() == false)
+                    {
+                        // If no chunking (and thus no compression) is required, we can
+                        // create a scalar dataset.
+                        baseWriter.writeScalar(objectPath, stringDataTypeId, stringDataTypeId,
+                                bytes, features.allowsCompact()
+                                        && (realMaxLengthInBytes < MAX_COMPACT_SIZE),
+                                baseWriter.keepDataIfExists(features), registry);
+                    } else
+                    {
+                        final long[] chunkSizeOrNull =
+                                HDF5Utils.tryGetChunkSizeForString(realMaxLengthInBytes,
+                                        features.requiresChunking());
+                        final int dataSetId;
+                        if (exists)
+                        {
+                            dataSetId =
+                                    baseWriter.h5.openDataSet(baseWriter.fileId, objectPath,
+                                            registry);
+
+                        } else
+                        {
+                            final HDF5StorageLayout layout =
+                                    baseWriter.determineLayout(stringDataTypeId,
+                                            HDF5Utils.SCALAR_DIMENSIONS, chunkSizeOrNull, null);
+                            dataSetId =
+                                    baseWriter.h5.createDataSet(baseWriter.fileId,
+                                            HDF5Utils.SCALAR_DIMENSIONS, chunkSizeOrNull,
+                                            stringDataTypeId, features, objectPath, layout,
+                                            baseWriter.fileFormat, registry);
+                        }
+                        H5Dwrite(dataSetId, stringDataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, bytes);
+                    }
+                    return null; // Nothing to return.
+                }
+
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
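+    // Example (illustrative; "stringWriter" and the paths are assumed): without a chunking
+    // feature the string is stored as a scalar data set, while a chunked feature takes the
+    // chunked branch above:
+    //
+    //   stringWriter.write("/greeting", "hello");
+    //   stringWriter.write("/greeting2", "hello", 32, HDF5GenericStorageFeatures.GENERIC_CHUNKED);
+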
+    @Override
+    public void writeArray(final String objectPath, final String[] data,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeStringArray(objectPath, data, getMaxLength(data), true, features, false);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String[] data)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeStringArray(objectPath, data, getMaxLength(data), true,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION, false);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String[] data, final int maxLength)
+    {
+        writeStringArray(objectPath, data, maxLength, false,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION, false);
+    }
+
+    private static int getMaxLength(String[] data)
+    {
+        int maxLength = 0;
+        for (String s : data)
+        {
+            maxLength = Math.max(maxLength, s.length());
+        }
+        return maxLength;
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final String[] data, int maxLength,
+            final HDF5GenericStorageFeatures features) throws HDF5JavaException
+    {
+        assert maxLength >= 0;
+
+        writeStringArray(objectPath, data, maxLength, false, features, false);
+    }
+
+    private void writeStringArray(final String objectPath, final String[] data,
+            final int maxLength, final boolean lengthFitsValue,
+            final HDF5GenericStorageFeatures features, final boolean variableLength)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    if (variableLength)
+                    {
+                        final int elementSize = 8; // 64-bit pointers
+                        final int stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, stringDataTypeId,
+                                        new long[]
+                                            { data.length }, elementSize, features, registry);
+                        baseWriter.writeStringVL(dataSetId, data);
+                    } else
+                    {
+                        final StringArrayBuffer array =
+                                baseWriter.new StringArrayBuffer(maxLength, lengthFitsValue);
+                        array.addAll(data);
+                        final byte[] arrData = array.toArray();
+                        final int elementSize = array.getMaxLengthInByte();
+                        final int stringDataTypeId =
+                                baseWriter.h5.createDataTypeString(elementSize, registry);
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, stringDataTypeId,
+                                        new long[]
+                                            { data.length }, elementSize, features, registry);
+                        H5Dwrite(dataSetId, stringDataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                                arrData);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int maxLength, final int size)
+    {
+        createArray(objectPath, maxLength, size, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize)
+    {
+        createArray(objectPath, maxLength, size, blockSize, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int maxLength, final int size,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert maxLength > 0;
+
+        createStringArray(objectPath, maxLength, size, features, false);
+    }
+
+    private void createStringArray(final String objectPath, final int maxLength, final int size,
+            final HDF5GenericStorageFeatures features, final boolean variableLength)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int elementSize;
+                    final int stringDataTypeId;
+                    if (variableLength)
+                    {
+                        elementSize = 8; // 64-bit pointers
+                        stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                    } else
+                    {
+                        elementSize =
+                                baseWriter.encodingForNewDataSets.getMaxBytesPerChar() * maxLength;
+                        stringDataTypeId =
+                                baseWriter.h5.createDataTypeString(elementSize, registry);
+                    }
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, stringDataTypeId, features, new long[]
+                            { 0 }, new long[]
+                            { size }, elementSize, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, stringDataTypeId, features, new long[]
+                            { size }, null, elementSize, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features)
+    {
+        assert maxLength > 0;
+
+        createStringArray(objectPath, maxLength, size, blockSize, features, false);
+    }
+
+    private void createStringArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features,
+            final boolean variableLength)
+    {
+        assert objectPath != null;
+        assert blockSize > 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int elementSize;
+                    final int stringDataTypeId;
+                    if (variableLength)
+                    {
+                        elementSize = 8; // 64-bit pointers
+                        stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                    } else
+                    {
+                        elementSize =
+                                baseWriter.encodingForNewDataSets.getMaxBytesPerChar() * maxLength;
+                        stringDataTypeId =
+                                baseWriter.h5.createDataTypeString(elementSize, registry);
+                    }
+                    baseWriter.createDataSet(objectPath, stringDataTypeId, features, new long[]
+                        { size }, new long[]
+                        { blockSize }, elementSize, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final String[] data,
+            final long blockNumber)
+    {
+        assert data != null;
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final String[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    final int stringDataTypeId =
+                            baseWriter.h5.getDataTypeForDataSet(dataSetId, registry);
+                    if (baseWriter.h5.isVariableLengthString(stringDataTypeId))
+                    {
+                        baseWriter.writeStringVL(dataSetId, memorySpaceId, dataSpaceId, data);
+                    } else
+                    {
+                        final int maxLength = baseWriter.h5.getDataTypeSize(stringDataTypeId);
+                        writeStringArray(dataSetId, stringDataTypeId, memorySpaceId, dataSpaceId,
+                                H5P_DEFAULT, data, maxLength);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
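+    // Example (illustrative; names are assumed): blocks can only be written to a data set
+    // that was created with a block size, e.g.
+    //
+    //   stringWriter.createArray("/words", 16, 100L, 10);
+    //   stringWriter.writeArrayBlock("/words", tenStrings, 0L);   // tenStrings.length == 10
+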
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<String> data)
+            throws HDF5JavaException
+    {
+        writeStringMDArray(objectPath, data, getMaxLength(data.getAsFlatArray()), true,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION, false);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<String> data,
+            final HDF5GenericStorageFeatures features) throws HDF5JavaException
+    {
+        writeStringMDArray(objectPath, data, getMaxLength(data.getAsFlatArray()), true, features,
+                false);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength) throws HDF5JavaException
+    {
+        writeMDArray(objectPath, data, maxLength,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength, final HDF5GenericStorageFeatures features)
+            throws HDF5JavaException
+    {
+        writeStringMDArray(objectPath, data, maxLength, false, features, false);
+    }
+
+    private void writeStringMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength, final boolean lengthFitsValue,
+            final HDF5GenericStorageFeatures features, final boolean variableLength)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert maxLength >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    if (variableLength)
+                    {
+                        final int elementSize = 8; // 64-bit pointers
+                        final int stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, stringDataTypeId,
+                                        data.longDimensions(), elementSize, features, registry);
+                        baseWriter.writeStringVL(dataSetId, data.getAsFlatArray());
+                    } else
+                    {
+                        final StringArrayBuffer array =
+                                baseWriter.new StringArrayBuffer(maxLength, lengthFitsValue);
+                        array.addAll(data.getAsFlatArray());
+                        final byte[] arrData = array.toArray();
+                        final int elementSize = array.getMaxLengthInByte();
+                        final int stringDataTypeId =
+                                baseWriter.h5.createDataTypeString(elementSize, registry);
+                        final int dataSetId =
+                                baseWriter.getOrCreateDataSetId(objectPath, stringDataTypeId,
+                                        data.longDimensions(), elementSize, features, registry);
+                        H5Dwrite(dataSetId, stringDataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                                arrData);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions)
+    {
+        createMDArray(objectPath, maxLength, dimensions, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize)
+    {
+        createMDArray(objectPath, maxLength, dimensions, blockSize, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions, final HDF5GenericStorageFeatures features)
+    {
+        assert maxLength > 0;
+
+        createStringMDArray(objectPath, maxLength, dimensions, features, false);
+    }
+
+    private void createStringMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions, final HDF5GenericStorageFeatures features,
+            final boolean variableLength)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int elementSize;
+                    final int stringDataTypeId;
+                    if (variableLength)
+                    {
+                        elementSize = 8; // 64-bit pointers
+                        stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                    } else
+                    {
+                        elementSize =
+                                baseWriter.encodingForNewDataSets.getMaxBytesPerChar() * maxLength;
+                        stringDataTypeId =
+                                baseWriter.h5.createDataTypeString(elementSize, registry);
+                    }
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, stringDataTypeId, features, new long[]
+                            { 0 }, MDAbstractArray.toLong(dimensions), elementSize, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, stringDataTypeId, features,
+                                MDAbstractArray.toLong(dimensions), null, elementSize, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert maxLength > 0;
+
+        createStringMDArray(objectPath, maxLength, dimensions, blockSize, features, false);
+    }
+
+    private void createStringMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize,
+            final HDF5GenericStorageFeatures features, final boolean variableLength)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int elementSize;
+                    final int stringDataTypeId;
+                    if (variableLength)
+                    {
+                        elementSize = 8; // 64-bit pointers
+                        stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                    } else
+                    {
+                        elementSize =
+                                baseWriter.encodingForNewDataSets.getMaxBytesPerChar() * maxLength;
+                        stringDataTypeId =
+                                baseWriter.h5.createDataTypeString(elementSize, registry);
+                    }
+                    baseWriter.createDataSet(objectPath, stringDataTypeId, features, dimensions,
+                            MDAbstractArray.toLong(blockSize), elementSize, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDArray<String> data,
+            final long[] blockNumber)
+    {
+        assert data != null;
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final MDArray<String> data, final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    final int stringDataTypeId =
+                            baseWriter.h5.getDataTypeForDataSet(dataSetId, registry);
+                    if (baseWriter.h5.isVariableLengthString(stringDataTypeId))
+                    {
+                        baseWriter.writeStringVL(dataSetId, memorySpaceId, dataSpaceId,
+                                data.getAsFlatArray());
+                    } else
+                    {
+                        final int maxLength = baseWriter.h5.getDataTypeSize(stringDataTypeId);
+                        writeStringArray(dataSetId, stringDataTypeId, memorySpaceId, dataSpaceId,
+                                H5P_DEFAULT, data.getAsFlatArray(), maxLength);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeVL(final String objectPath, final String data)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId;
+                    if (baseWriter.h5.exists(baseWriter.fileId, objectPath))
+                    {
+                        dataSetId =
+                                baseWriter.h5.openObject(baseWriter.fileId, objectPath, registry);
+                    } else
+                    {
+                        dataSetId =
+                                baseWriter.h5.createScalarDataSet(baseWriter.fileId,
+                                        baseWriter.variableLengthStringDataTypeId, objectPath,
+                                        true, registry);
+                    }
+                    H5DwriteString(dataSetId, baseWriter.variableLengthStringDataTypeId,
+                            H5S_SCALAR, H5S_SCALAR, H5P_DEFAULT, new String[]
+                                { data });
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
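+    // Example (illustrative; "stringWriter" is assumed): a variable-length string data set
+    // stores a pointer per element, so it can be overwritten with a value of any length:
+    //
+    //   stringWriter.writeVL("/note", "short");
+    //   stringWriter.writeVL("/note", "a considerably longer replacement value");
+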
+    @Override
+    public void writeArrayVL(final String objectPath, final String[] data)
+    {
+        writeArrayVL(objectPath, data, GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArrayVL(final String objectPath, final String[] data,
+            final HDF5GenericStorageFeatures features)
+    {
+        writeStringArray(objectPath, data, -1, false, features, true);
+    }
+
+    @Override
+    public void createArrayVL(final String objectPath, final int size)
+    {
+        createArrayVL(objectPath, size,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArrayVL(final String objectPath, final long size,
+            final int blockSize) throws HDF5JavaException
+    {
+        createArrayVL(objectPath, size, blockSize,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArrayVL(final String objectPath, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features)
+    {
+        createStringArray(objectPath, -1, size, blockSize, features, true);
+    }
+
+    @Override
+    public void createArrayVL(final String objectPath, final int size,
+            final HDF5GenericStorageFeatures features)
+    {
+        createStringArray(objectPath, -1, size, features, true);
+    }
+
+    @Override
+    public void createMDArrayVL(final String objectPath, final int[] dimensions,
+            final HDF5GenericStorageFeatures features)
+    {
+        createStringMDArray(objectPath, -1, dimensions, features, true);
+    }
+
+    @Override
+    public void createMDArrayVL(final String objectPath, final int[] dimensions)
+    {
+        createStringMDArray(objectPath, -1, dimensions, GENERIC_NO_COMPRESSION, true);
+    }
+
+    @Override
+    public void createMDArrayVL(final String objectPath, final long[] dimensions,
+            final int[] blockSize, final HDF5GenericStorageFeatures features)
+    {
+        createStringMDArray(objectPath, -1, dimensions, blockSize, features, true);
+    }
+
+    @Override
+    public void createMDArrayVL(final String objectPath, final long[] dimensions,
+            final int[] blockSize)
+    {
+        createStringMDArray(objectPath, -1, dimensions, blockSize, GENERIC_NO_COMPRESSION, true);
+    }
+
+    @Override
+    public void writeMDArrayVL(final String objectPath,
+            final MDArray<String> data, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int pointerSize = 8; // 64-bit pointers
+                    final int stringDataTypeId = baseWriter.variableLengthStringDataTypeId;
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, stringDataTypeId,
+                                    MDAbstractArray.toLong(data.dimensions()), pointerSize,
+                                    features, registry);
+                    baseWriter.writeStringVL(dataSetId, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArrayVL(final String objectPath, final MDArray<String> data)
+    {
+        writeMDArrayVL(objectPath, data, GENERIC_NO_COMPRESSION);
+    }
+
+    /**
+     * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the
+     * application memory data object into the file.
+     * 
+     * @param dataset_id Identifier of the dataset to write to.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param obj String array with data to be written to the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @return a non-negative value if successful
+     * @exception HDF5Exception - Failure in the data conversion.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data object is null.
+     */
+    private int writeStringArray(final int dataset_id, final int mem_type_id,
+            final int mem_space_id, final int file_space_id, final int xfer_plist_id,
+            final String[] obj, final int maxLength) throws HDF5Exception, HDF5LibraryException,
+            NullPointerException
+    {
+        final byte[] buf = StringUtils.toBytes(obj, maxLength, baseWriter.encodingForNewDataSets);
+
+        /* will raise exception on error */
+        final int status =
+                H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf);
+
+        return status;
+    }
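+
+    // Illustrative note (assuming a single-byte encoding such as ASCII): StringUtils.toBytes
+    // flattens the array into one fixed-length, 0-padded buffer, e.g. { "ab", "c" } with
+    // maxLength 2 becomes the four bytes { 'a', 'b', 'c', 0 }.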
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDuration.java b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDuration.java
new file mode 100644
index 0000000..3f11d1a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDuration.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An object to store a time duration.
+ *
+ * @author Bernd Rinn
+ */
+public class HDF5TimeDuration
+{
+    private final long duration;
+    
+    private final HDF5TimeUnit timeUnit;
+    
+    public HDF5TimeDuration(long duration, HDF5TimeUnit timeUnit)
+    {
+        this.duration = duration;
+        this.timeUnit = timeUnit;
+    }
+
+    /**
+     * The time duration; see {@link #getUnit()} for the time unit.
+     */
+    public long getValue()
+    {
+        return duration;
+    }
+
+    /**
+     * The time duration in the given <var>targetUnit</var>.
+     */
+    public long getValue(HDF5TimeUnit targetUnit)
+    {
+        return (targetUnit == timeUnit) ? duration : targetUnit.convert(duration, timeUnit);
+    }
+
+    /**
+     * The time unit of the duration.
+     */
+    public HDF5TimeUnit getUnit()
+    {
+        return timeUnit;
+    }
+
+    /**
+     * Returns <code>true</code> if <var>that</var> represents the same time duration.
+     */
+    public boolean isEquivalent(HDF5TimeDuration that)
+    {
+        if (this.timeUnit == that.timeUnit)
+        {
+            return this.duration == that.duration;
+        } else
+        {
+            return this.timeUnit.convert(that) == this.duration;
+        }
+    }
+    
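+    // Example (illustrative, assuming HDF5TimeUnit defines MINUTES and SECONDS): durations
+    // are compared after unit conversion, so
+    //
+    //   new HDF5TimeDuration(1, HDF5TimeUnit.MINUTES)
+    //           .isEquivalent(new HDF5TimeDuration(60, HDF5TimeUnit.SECONDS))
+    //
+    // returns true.
+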
+    //
+    // Object
+    //
+    
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + (int) (duration ^ (duration >>> 32));
+        result = prime * result + ((timeUnit == null) ? 0 : timeUnit.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final HDF5TimeDuration other = (HDF5TimeDuration) obj;
+        return duration == other.duration && timeUnit == other.timeUnit;
+    }
+    
+    @Override
+    public String toString()
+    {
+        return Long.toString(duration) + " " + timeUnit.toString();
+    }
+
+}
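
Note the asymmetry between equals() and isEquivalent() above: equals() demands identical value and unit, while isEquivalent() converts between units before comparing. A small illustration (statements belong inside some main method; the SECONDS and MINUTES constants of the HDF5TimeUnit enum are assumed):

    HDF5TimeDuration a = new HDF5TimeDuration(60, HDF5TimeUnit.SECONDS);
    HDF5TimeDuration b = new HDF5TimeDuration(1, HDF5TimeUnit.MINUTES);
    System.out.println(a.equals(b));                      // false: unit and value differ
    System.out.println(a.isEquivalent(b));                // true: 1 min converts to 60 s
    System.out.println(a.getValue(HDF5TimeUnit.MINUTES)); // 1
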
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationArray.java b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationArray.java
new file mode 100644
index 0000000..438090f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationArray.java
@@ -0,0 +1,219 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Arrays;
+
+/**
+ * An array of time durations.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5TimeDurationArray
+{
+    final long[] timeDurations;
+
+    final HDF5TimeUnit timeUnit;
+
+    /**
+     * Creates an array of <var>timeDurations</var> using a common <var>timeUnit</var>.
+     */
+    public HDF5TimeDurationArray(long[] timeDurations, HDF5TimeUnit timeUnit)
+    {
+        this.timeDurations = timeDurations;
+        this.timeUnit = timeUnit;
+    }
+
+    /**
+     * Creates a {@link HDF5TimeDurationArray} from the given <var>durationValues</var> with the
+     * given <var>timeUnit</var>.
+     */
+    public static HDF5TimeDurationArray create(HDF5TimeUnit timeUnit, long... durationValues)
+    {
+        if (durationValues.length == 0)
+        {
+            return new HDF5TimeDurationArray(new long[0], timeUnit);
+        }
+        return new HDF5TimeDurationArray(durationValues, timeUnit);
+    }
+
+    /**
+     * Creates a {@link HDF5TimeDurationArray} from the given <var>timeDurations</var>. Converts all
+     * values to the smallest time unit found in <var>timeDurations</var>.
+     */
+    public static HDF5TimeDurationArray create(HDF5TimeDuration... timeDurations)
+    {
+        if (timeDurations.length == 0)
+        {
+            return new HDF5TimeDurationArray(new long[0], HDF5TimeUnit.SECONDS);
+        }
+        HDF5TimeUnit unit = timeDurations[0].getUnit();
+        boolean needsConversion = false;
+        for (int i = 1; i < timeDurations.length; ++i)
+        {
+            final HDF5TimeUnit u = timeDurations[i].getUnit();
+            if (u != unit)
+            {
+                if (u.ordinal() < unit.ordinal())
+                {
+                    unit = u;
+                }
+                needsConversion = true;
+            }
+        }
+        final long[] durations = new long[timeDurations.length];
+        if (needsConversion)
+        {
+            for (int i = 0; i < timeDurations.length; ++i)
+            {
+                durations[i] = unit.convert(timeDurations[i]);
+            }
+        } else
+        {
+            for (int i = 0; i < timeDurations.length; ++i)
+            {
+                durations[i] = timeDurations[i].getValue();
+            }
+        }
+        return new HDF5TimeDurationArray(durations, unit);
+    }
+
+    /**
+     * Returns the time unit.
+     */
+    public HDF5TimeUnit getUnit()
+    {
+        return timeUnit;
+    }
+
+    /**
+     * Returns the time duration values.
+     */
+    public long[] getValues()
+    {
+        return timeDurations;
+    }
+
+    /**
+     * Returns the number of elements.
+     */
+    public int getLength()
+    {
+        return timeDurations.length;
+    }
+
+    /**
+     * Returns the time duration values in the given <var>targetUnit</var>.
+     */
+    public long[] getValues(HDF5TimeUnit targetUnit)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return timeDurations;
+        }
+        final long[] targetDurations = new long[timeDurations.length];
+        for (int i = 0; i < targetDurations.length; ++i)
+        {
+            targetDurations[i] = targetUnit.convert(timeDurations[i], timeUnit);
+        }
+        return targetDurations;
+    }
+
+    /**
+     * Returns the element at <var>index</var>.
+     */
+    public HDF5TimeDuration get(int index)
+    {
+        return new HDF5TimeDuration(timeDurations[index], timeUnit);
+    }
+
+    /**
+     * Returns the element at <var>index</var> in the given <var>targetUnit</var>.
+     */
+    public HDF5TimeDuration get(int index, HDF5TimeUnit targetUnit)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return new HDF5TimeDuration(timeDurations[index], timeUnit);
+        } else
+        {
+            return new HDF5TimeDuration(targetUnit.convert(timeDurations[index], timeUnit),
+                    targetUnit);
+        }
+    }
+
+    /**
+     * Returns the value element <var>index</var>.
+     */
+    public long getValue(int index)
+    {
+        return timeDurations[index];
+    }
+
+    /**
+     * Returns the value element <var>index</var> in the given <var>targetUnit</var>.
+     */
+    public long getValue(int index, HDF5TimeUnit targetUnit)
+    {
+        return (targetUnit == timeUnit) ? timeDurations[index] : targetUnit.convert(
+                timeDurations[index], timeUnit);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + Arrays.hashCode(timeDurations);
+        result = prime * result + ((timeUnit == null) ? 0 : timeUnit.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final HDF5TimeDurationArray other = (HDF5TimeDurationArray) obj;
+        if (Arrays.equals(timeDurations, other.timeDurations) == false)
+        {
+            return false;
+        }
+        if (timeUnit != other.timeUnit)
+        {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public String toString()
+    {
+        return "HDF5TimeDurationArray [timeDurations=" + Arrays.toString(timeDurations)
+                + ", timeUnit=" + timeUnit + "]";
+    }
+}
\ No newline at end of file
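
As the create(HDF5TimeDuration...) factory above shows, mixed-unit input is normalized to the smallest unit that occurs (the ordinal comparison assumes that smaller units come first in the HDF5TimeUnit enum). A sketch of the effect, again assuming the SECONDS and MINUTES constants:

    HDF5TimeDurationArray array = HDF5TimeDurationArray.create(
            new HDF5TimeDuration(30, HDF5TimeUnit.SECONDS),
            new HDF5TimeDuration(2, HDF5TimeUnit.MINUTES));
    System.out.println(array.getUnit());                         // SECONDS, the smallest unit
    System.out.println(array.getValue(1));                       // 120, converted to seconds
    System.out.println(array.getValue(1, HDF5TimeUnit.MINUTES)); // 2
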
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationMDArray.java b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationMDArray.java
new file mode 100644
index 0000000..b56c15f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationMDArray.java
@@ -0,0 +1,577 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Iterator;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * A multi-dimensional array of time durations.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5TimeDurationMDArray extends MDAbstractArray<Long>
+{
+    private static final long serialVersionUID = 1L;
+
+    final MDLongArray timeDurations;
+
+    final HDF5TimeUnit timeUnit;
+
+    /**
+     * Creates an array of <var>timeDurations</var> using a common <var>timeUnit</var>.
+     */
+    public HDF5TimeDurationMDArray(MDLongArray timeDurations, HDF5TimeUnit timeUnit)
+    {
+        super(timeDurations.dimensions(), timeDurations.getAsFlatArray().length, 0);
+        this.timeDurations = timeDurations;
+        this.timeUnit = timeUnit;
+    }
+
+    /**
+     * Creates an array of <var>timeDurations</var> using a common <var>timeUnit</var>.
+     */
+    public HDF5TimeDurationMDArray(long[] timeDurations, int[] dimensions, HDF5TimeUnit timeUnit)
+    {
+        super(dimensions, timeDurations.length, 0);
+        this.timeDurations = new MDLongArray(timeDurations, dimensions, true);
+        this.timeUnit = timeUnit;
+    }
+
+    /**
+     * Creates an array of dimension <var>dimensions</var> with <var>timeUnit</var>.
+     */
+    public HDF5TimeDurationMDArray(int[] dimensions, HDF5TimeUnit timeUnit)
+    {
+        this(new long[getLength(dimensions, 0)], dimensions, timeUnit);
+    }
+
+    /**
+     * Creates an array of <var>timeDurations</var> using a common <var>timeUnit</var>.
+     */
+    public HDF5TimeDurationMDArray(HDF5TimeDuration[] timeDurations, int[] dimensions,
+            HDF5TimeUnit timeUnit)
+    {
+        super(dimensions, timeDurations.length, 0);
+        // Conversion is needed if any element's unit differs from timeUnit; checking
+        // only the smallest unit would miss mixed-unit input.
+        boolean needsConversion = false;
+        for (int i = 0; i < timeDurations.length && needsConversion == false; ++i)
+        {
+            needsConversion = (timeDurations[i].getUnit() != timeUnit);
+        }
+        final long[] durations = new long[timeDurations.length];
+        if (needsConversion)
+        {
+            for (int i = 0; i < timeDurations.length; ++i)
+            {
+                durations[i] = timeUnit.convert(timeDurations[i]);
+            }
+        } else
+        {
+            for (int i = 0; i < timeDurations.length; ++i)
+            {
+                durations[i] = timeDurations[i].getValue();
+            }
+        }
+        this.timeDurations = new MDLongArray(durations, dimensions, true);
+        this.timeUnit = timeUnit;
+    }
+
+    /**
+     * Creates an array of <var>timeDurations</var> using the smallest time unit.
+     */
+    public HDF5TimeDurationMDArray(HDF5TimeDuration[] timeDurations, int[] dimensions)
+    {
+        super(dimensions, timeDurations.length, 0);
+        HDF5TimeUnit smallestTimeUnit = getSmallestUnit(timeDurations);
+        final long[] durations = new long[timeDurations.length];
+        for (int i = 0; i < timeDurations.length; ++i)
+        {
+            durations[i] = smallestTimeUnit.convert(timeDurations[i]);
+        }
+        this.timeDurations = new MDLongArray(durations, dimensions, true);
+        this.timeUnit = smallestTimeUnit;
+    }
+
+    private static HDF5TimeUnit getSmallestUnit(HDF5TimeDuration[] timeDurations)
+    {
+        HDF5TimeUnit unit = timeDurations[0].getUnit();
+        for (int i = 1; i < timeDurations.length; ++i)
+        {
+            final HDF5TimeUnit u = timeDurations[i].getUnit();
+            if (u != unit)
+            {
+                if (u.ordinal() < unit.ordinal())
+                {
+                    unit = u;
+                }
+            }
+        }
+        return unit;
+    }
+
+    /**
+     * Returns the time unit.
+     */
+    public HDF5TimeUnit getUnit()
+    {
+        return timeUnit;
+    }
+
+    /**
+     * Returns the time duration values as a flat array.
+     */
+    @Override
+    public long[] getAsFlatArray()
+    {
+        return timeDurations.getAsFlatArray();
+    }
+
+    /**
+     * Returns the dimensions of the array.
+     */
+    @Override
+    public int[] dimensions()
+    {
+        return timeDurations.dimensions();
+    }
+
+    /**
+     * Returns the dimensions of the array as a <code>long[]</code>.
+     */
+    @Override
+    public long[] longDimensions()
+    {
+        return timeDurations.longDimensions();
+    }
+
+    /**
+     * Returns the number of elements.
+     */
+    public int getLength()
+    {
+        return timeDurations.size();
+    }
+
+    /**
+     * Returns the time duration values.
+     */
+    public MDLongArray getValues()
+    {
+        return timeDurations;
+    }
+    
+    /**
+     * Returns the time duration values in the given <var>targetUnit</var>.
+     */
+    public MDLongArray getValues(HDF5TimeUnit targetUnit)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return timeDurations;
+        }
+        final long[] sourceDurations = timeDurations.getAsFlatArray();
+        final long[] targetDurations = new long[sourceDurations.length];
+        for (int i = 0; i < targetDurations.length; ++i)
+        {
+            targetDurations[i] = targetUnit.convert(sourceDurations[i], timeUnit);
+        }
+        return new MDLongArray(targetDurations, timeDurations.dimensions());
+    }
+
+    /**
+     * Returns the time duration values as a flat array in the given <var>targetUnit</var>.
+     */
+    public long[] getAsFlatArray(HDF5TimeUnit targetUnit)
+    {
+        return getValues(targetUnit).getAsFlatArray();
+    }
+
+    /**
+     * Returns the value of a one-dimensional array at the position defined by <var>index</var> in
+     * the given <var>targetUnit</var>.
+     * <p>
+     * <b>Do not call for arrays other than one-dimensional!</b>
+     */
+    public HDF5TimeDuration get(HDF5TimeUnit targetUnit, int index)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return new HDF5TimeDuration(timeDurations.get(index), timeUnit);
+        } else
+        {
+            return new HDF5TimeDuration(targetUnit.convert(timeDurations.get(index), timeUnit),
+                    targetUnit);
+        }
+    }
+
+    /**
+     * Returns the value of a two-dimensional array at the position defined by <var>indexX</var> and
+     * <var>indexY</var> in the given <var>targetUnit</var>.
+     * <p>
+     * <b>Do not call for arrays other than two-dimensional!</b>
+     */
+    public HDF5TimeDuration get(HDF5TimeUnit targetUnit, int indexX, int indexY)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return new HDF5TimeDuration(timeDurations.get(indexX, indexY), timeUnit);
+        } else
+        {
+            return new HDF5TimeDuration(targetUnit.convert(timeDurations.get(indexX, indexY),
+                    timeUnit), targetUnit);
+        }
+    }
+
+    /**
+     * Returns the value of a three-dimensional array at the position defined by <var>indexX</var>,
+     * <var>indexY</var> and <var>indexZ</var> in the given <var>targetUnit</var>.
+     * <p>
+     * <b>Do not call for arrays other than three-dimensional!</b>
+     */
+    public HDF5TimeDuration get(HDF5TimeUnit targetUnit, int indexX, int indexY, int indexZ)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return new HDF5TimeDuration(timeDurations.get(indexX, indexY, indexZ), timeUnit);
+        } else
+        {
+            return new HDF5TimeDuration(targetUnit.convert(
+                    timeDurations.get(indexX, indexY, indexZ), timeUnit), targetUnit);
+        }
+    }
+
+    /**
+     * Returns the value of the array at the position defined by <var>indices</var> in the given
+     * <var>targetUnit</var>.
+     */
+    public HDF5TimeDuration get(HDF5TimeUnit targetUnit, int... indices)
+    {
+        if (targetUnit == timeUnit)
+        {
+            return new HDF5TimeDuration(timeDurations.get(indices), timeUnit);
+        } else
+        {
+            return new HDF5TimeDuration(targetUnit.convert(timeDurations.get(indices), timeUnit),
+                    targetUnit);
+        }
+    }
+
+    /**
+     * Returns the value element <var>index</var>.
+     * <p>
+     * <b>Do not call for arrays other than one-dimensional!</b>
+     */
+    public long getValue(int index)
+    {
+        return timeDurations.get(index);
+    }
+
+    /**
+     * Returns the value element <var>(indexX,indexY)</var>.
+     * <p>
+     * <b>Do not call for arrays other than two-dimensional!</b>
+     */
+    public long getValue(int indexX, int indexY)
+    {
+        return timeDurations.get(indexX, indexY);
+    }
+
+    /**
+     * Returns the value element <var>(indexX,indexY,indexZ)</var>.
+     * <p>
+     * <b>Do not call for arrays other than three-dimensional!</b>
+     */
+    public long getValue(int indexX, int indexY, int indexZ)
+    {
+        return timeDurations.get(indexX, indexY, indexZ);
+    }
+
+    /**
+     * Returns the value element <var>indices</var>.
+     */
+    public long getValue(int... indices)
+    {
+        return timeDurations.get(indices);
+    }
+
+    /**
+     * Returns the value element <var>index</var> in the given <var>targetUnit</var>.
+     * <p>
+     * <b>Do not call for arrays other than one-dimensional!</b>
+     */
+    public long getValue(HDF5TimeUnit targetUnit, int index)
+    {
+        return (targetUnit == timeUnit) ? timeDurations.get(index) : targetUnit.convert(
+                timeDurations.get(index), timeUnit);
+    }
+
+    /**
+     * Returns the value element <var>(indexX,indexY)</var> in the given <var>targetUnit</var>.
+     * <p>
+     * <b>Do not call for arrays other than two-dimensional!</b>
+     */
+    public long getValue(HDF5TimeUnit targetUnit, int indexX, int indexY)
+    {
+        return (targetUnit == timeUnit) ? timeDurations.get(indexX, indexY) : targetUnit.convert(
+                timeDurations.get(indexX, indexY), timeUnit);
+    }
+
+    /**
+     * Returns the value element <var>(indexX,indexY,indexZ)</var> in the given
+     * <var>targetUnit</var>.
+     * <p>
+     * <b>Do not call for arrays other than three-dimensional!</b>
+     */
+    public long getValue(HDF5TimeUnit targetUnit, int indexX, int indexY, int indexZ)
+    {
+        return (targetUnit == timeUnit) ? timeDurations.get(indexX, indexY, indexZ) : targetUnit
+                .convert(timeDurations.get(indexX, indexY, indexZ), timeUnit);
+    }
+
+    /**
+     * Returns the value element <var>indices</var> in the given <var>targetUnit</var>.
+     */
+    public long getValue(HDF5TimeUnit targetUnit, int... indices)
+    {
+        return (targetUnit == timeUnit) ? timeDurations.get(indices) : targetUnit.convert(
+                timeDurations.get(indices), timeUnit);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + timeDurations.hashCode();
+        result = prime * result + ((timeUnit == null) ? 0 : timeUnit.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+        {
+            return true;
+        }
+        if (obj == null)
+        {
+            return false;
+        }
+        if (getClass() != obj.getClass())
+        {
+            return false;
+        }
+        final HDF5TimeDurationMDArray other = (HDF5TimeDurationMDArray) obj;
+        if (timeDurations.equals(other.timeDurations) == false)
+        {
+            return false;
+        }
+        if (timeUnit != other.timeUnit)
+        {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public String toString()
+    {
+        return "HDF5TimeDurationArray [timeDurations=" + timeDurations.toString() + ", timeUnit="
+                + timeUnit + "]";
+    }
+
+    @Override
+    public Long getAsObject(int linearIndex)
+    {
+        return timeDurations.getAsObject(linearIndex);
+    }
+
+    @Override
+    public Long getAsObject(int... indices)
+    {
+        return timeDurations.getAsObject(indices);
+    }
+
+    @Override
+    public void setToObject(Long value, int... indices)
+    {
+        timeDurations.setToObject(value, indices);
+    }
+
+    @Override
+    public void setToObject(Long value, int linearIndex)
+    {
+        timeDurations.setToObject(value, linearIndex);
+    }
+
+    @Override
+    public long[] getCopyAsFlatArray()
+    {
+        return timeDurations.getCopyAsFlatArray();
+    }
+
+    @Override
+    protected void adaptCapacityHyperRows()
+    {
+        // Noop
+    }
+
+    @Override
+    public int capacity()
+    {
+        return timeDurations.capacity();
+    }
+
+    @Override
+    public int incNumberOfHyperRows(int count)
+    {
+        return timeDurations.incNumberOfHyperRows(count);
+    }
+
+    @Override
+    public int rank()
+    {
+        return timeDurations.rank();
+    }
+
+    @Override
+    public int size(int dim)
+    {
+        return timeDurations.size(dim);
+    }
+
+    @Override
+    public int size()
+    {
+        return timeDurations.size();
+    }
+
+    @Override
+    public int numberOfHyperRows()
+    {
+        return timeDurations.numberOfHyperRows();
+    }
+
+    @Override
+    public int decNumberOfHyperRows(int count)
+    {
+        return timeDurations.decNumberOfHyperRows(count);
+    }
+
+    @Override
+    public int computeIndex(int... indices)
+    {
+        return timeDurations.computeIndex(indices);
+    }
+
+    @Override
+    public int[] computeReverseIndex(int linearIndex)
+    {
+        return timeDurations.computeReverseIndex(linearIndex);
+    }
+
+    @Override
+    public int computeIndex(int indexX, int indexY)
+    {
+        return timeDurations.computeIndex(indexX, indexY);
+    }
+
+    @Override
+    public int computeIndex(int indexX, int indexY, int indexZ)
+    {
+        return timeDurations.computeIndex(indexX, indexY, indexZ);
+    }
+
+    @Override
+    public Iterator<ch.systemsx.cisd.base.mdarray.MDAbstractArray<Long>.ArrayEntry> iterator()
+    {
+        return timeDurations.iterator();
+    }
+
+    /**
+     * @see MDLongArray#get(int)
+     */
+    public long get(int index)
+    {
+        return timeDurations.get(index);
+    }
+
+    /**
+     * @see MDLongArray#get(int, int)
+     */
+    public long get(int indexX, int indexY)
+    {
+        return timeDurations.get(indexX, indexY);
+    }
+
+    /**
+     * @see MDLongArray#get(int, int, int)
+     */
+    public long get(int indexX, int indexY, int indexZ)
+    {
+        return timeDurations.get(indexX, indexY, indexZ);
+    }
+
+    /**
+     * @see MDLongArray#get(int[])
+     */
+    public long get(int... indices)
+    {
+        return timeDurations.get(indices);
+    }
+
+    /**
+     * @see MDLongArray#set(long, int)
+     */
+    public void set(long value, int index)
+    {
+        timeDurations.set(value, index);
+    }
+
+    /**
+     * @see MDLongArray#set(long, int, int)
+     */
+    public void set(long value, int indexX, int indexY)
+    {
+        timeDurations.set(value, indexX, indexY);
+    }
+
+    /**
+     * @see MDLongArray#set(long, int, int, int)
+     */
+    public void set(long value, int indexX, int indexY, int indexZ)
+    {
+        timeDurations.set(value, indexX, indexY, indexZ);
+    }
+
+    /**
+     * @see MDLongArray#set(long, int[])
+     */
+    public void set(long value, int... indices)
+    {
+        timeDurations.set(value, indices);
+    }
+
+    /**
+     * @see MDLongArray#toMatrix()
+     */
+    public long[][] toMatrix()
+    {
+        return timeDurations.toMatrix();
+    }
+
+}
\ No newline at end of file
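
The multi-dimensional variant wraps an MDLongArray together with one common unit; the get/getValue overloads either return the stored raw values or convert on the fly. A sketch for a 2x2 array in row-major layout (SECONDS and MINUTES again assumed):

    HDF5TimeDurationMDArray md = new HDF5TimeDurationMDArray(
            new long[] { 60, 120, 180, 240 }, new int[] { 2, 2 }, HDF5TimeUnit.SECONDS);
    System.out.println(md.get(1, 0));                            // 180 (seconds, raw)
    System.out.println(md.getValue(HDF5TimeUnit.MINUTES, 1, 0)); // 3
    System.out.println(md.get(HDF5TimeUnit.MINUTES, 1, 1));      // HDF5TimeDuration of 4 MINUTES
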
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationReader.java
new file mode 100644
index 0000000..7e1eca1
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationReader.java
@@ -0,0 +1,662 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT64;
+
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * Implementation of {@link IHDF5TimeDurationReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5TimeDurationReader implements IHDF5TimeDurationReader
+{
+
+    private final HDF5BaseReader baseReader;
+
+    private final HDF5LongReader longReader;
+
+    HDF5TimeDurationReader(HDF5BaseReader baseReader, HDF5LongReader longReader)
+    {
+        assert baseReader != null;
+        assert longReader != null;
+
+        this.baseReader = baseReader;
+        this.longReader = longReader;
+    }
+
+    @Override
+    public HDF5TimeDuration getAttr(final String objectPath, final String attributeName)
+    {
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDuration> getAttributeRunnable =
+                new ICallableWithCleanUp<HDF5TimeDuration>()
+                    {
+                        @Override
+                        public HDF5TimeDuration call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final int attributeId =
+                                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+                            final HDF5TimeUnit unit =
+                                    baseReader.checkIsTimeDuration(objectPath, attributeName,
+                                            objectId, registry);
+                            final long[] data =
+                                    baseReader.h5.readAttributeAsLongArray(attributeId,
+                                            H5T_NATIVE_INT64, 1);
+                            return new HDF5TimeDuration(data[0], unit);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public boolean isTimeDuration(String objectPath, String attributeName) throws HDF5JavaException
+    {
+        final HDF5DataTypeVariant typeVariantOrNull =
+                baseReader.tryGetTypeVariant(objectPath, attributeName);
+        return typeVariantOrNull != null && typeVariantOrNull.isTimeDuration();
+    }
+
+    @Override
+    public HDF5TimeUnit tryGetTimeUnit(String objectPath, String attributeName)
+            throws HDF5JavaException
+    {
+        final HDF5DataTypeVariant typeVariantOrNull =
+                baseReader.tryGetTypeVariant(objectPath, attributeName);
+        return (typeVariantOrNull != null) ? typeVariantOrNull.tryGetTimeUnit() : null;
+    }
+
+    @Override
+    public HDF5TimeDurationArray getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDurationArray> getAttributeRunnable =
+                new ICallableWithCleanUp<HDF5TimeDurationArray>()
+                    {
+                        @Override
+                        public HDF5TimeDurationArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, attributeName,
+                                            objectId, registry);
+                            final long[] data = longReader.getArrayAttr(objectPath, attributeName);
+                            return new HDF5TimeDurationArray(data, storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public HDF5TimeDurationMDArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDurationMDArray> getAttributeRunnable =
+                new ICallableWithCleanUp<HDF5TimeDurationMDArray>()
+                    {
+                        @Override
+                        public HDF5TimeDurationMDArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, attributeName,
+                                            objectId, registry);
+                            final MDLongArray data =
+                                    longReader.getMDArrayAttr(objectPath, attributeName);
+                            return new HDF5TimeDurationMDArray(data, storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public boolean isTimeDuration(final String objectPath) throws HDF5JavaException
+    {
+        final HDF5DataTypeVariant typeVariantOrNull = baseReader.tryGetTypeVariant(objectPath);
+        return typeVariantOrNull != null && typeVariantOrNull.isTimeDuration();
+    }
+
+    @Override
+    public HDF5TimeUnit tryGetTimeUnit(final String objectPath) throws HDF5JavaException
+    {
+        final HDF5DataTypeVariant typeVariantOrNull = baseReader.tryGetTypeVariant(objectPath);
+        return (typeVariantOrNull != null) ? typeVariantOrNull.tryGetTimeUnit() : null;
+    }
+
+    @Override
+    public HDF5TimeDuration read(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDuration> readCallable =
+                new ICallableWithCleanUp<HDF5TimeDuration>()
+                    {
+                        @Override
+                        public HDF5TimeDuration call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                            final long[] data = new long[1];
+                            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64, data);
+                            return new HDF5TimeDuration(data[0], storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    public long readTimeDuration(final String objectPath, final HDF5TimeUnit timeUnit)
+            throws HDF5JavaException
+    {
+        return timeUnit.convert(read(objectPath));
+    }
+
+    public HDF5TimeDuration readTimeDurationAndUnit(final String objectPath)
+            throws HDF5JavaException
+    {
+        return read(objectPath);
+    }
+
+    @Override
+    public HDF5TimeDurationArray readArray(final String objectPath) throws HDF5JavaException
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDurationArray> readCallable =
+                new ICallableWithCleanUp<HDF5TimeDurationArray>()
+                    {
+                        @Override
+                        public HDF5TimeDurationArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, registry);
+                            final long[] data = new long[spaceParams.blockSize];
+                            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            return new HDF5TimeDurationArray(data, storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    public long[] readTimeDurationArray(final String objectPath, final HDF5TimeUnit timeUnit)
+            throws HDF5JavaException
+    {
+        return timeUnit.convert(readArray(objectPath));
+    }
+
+    public HDF5TimeDuration[] readTimeDurationAndUnitArray(final String objectPath)
+            throws HDF5JavaException
+    {
+        final HDF5TimeDurationArray durations = readArray(objectPath);
+        return convertTimeDurations(durations.timeUnit, durations.timeDurations);
+    }
+
+    public long[] readTimeDurationArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber, final HDF5TimeUnit timeUnit)
+    {
+        return timeUnit.convert(readArrayBlock(objectPath, blockSize, blockNumber));
+    }
+
+    @Override
+    public HDF5TimeDurationArray readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public HDF5TimeDurationArray readArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDurationArray> readCallable =
+                new ICallableWithCleanUp<HDF5TimeDurationArray>()
+                    {
+                        @Override
+                        public HDF5TimeDurationArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offset, blockSize,
+                                            registry);
+                            final long[] data = new long[spaceParams.blockSize];
+                            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId, data);
+                            return new HDF5TimeDurationArray(data, storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    public long[] readTimeDurationArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset, final HDF5TimeUnit timeUnit)
+    {
+        return timeUnit.convert(readArrayBlockWithOffset(objectPath, blockSize, offset));
+    }
+
+    public HDF5TimeDuration[] readTimeDurationAndUnitArrayBlock(final String objectPath,
+            final int blockSize, final long blockNumber) throws HDF5JavaException
+    {
+        return readTimeDurationAndUnitArrayBlockWithOffset(objectPath, blockSize, blockSize
+                * blockNumber);
+    }
+
+    public HDF5TimeDuration[] readTimeDurationAndUnitArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset) throws HDF5JavaException
+    {
+        final HDF5TimeDurationArray durations =
+                readArrayBlockWithOffset(objectPath, blockSize, offset);
+        return convertTimeDurations(durations.timeUnit, durations.timeDurations);
+    }
+
+    public Iterable<HDF5DataBlock<HDF5TimeDuration[]>> getTimeDurationAndUnitArrayNaturalBlocks(
+            final String objectPath) throws HDF5JavaException
+    {
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(objectPath));
+
+        return new Iterable<HDF5DataBlock<HDF5TimeDuration[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<HDF5TimeDuration[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<HDF5TimeDuration[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<HDF5TimeDuration[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final HDF5TimeDuration[] block =
+                                        readTimeDurationAndUnitArrayBlockWithOffset(objectPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5DataBlock<HDF5TimeDuration[]>(block,
+                                        index.getAndIncIndex(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public HDF5TimeDurationMDArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDurationMDArray> readCallable =
+                new ICallableWithCleanUp<HDF5TimeDurationMDArray>()
+                    {
+                        @Override
+                        public HDF5TimeDurationMDArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                            return new HDF5TimeDurationMDArray(longReader.readLongMDArray(
+                                    dataSetId, registry), storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public HDF5TimeDurationMDArray readMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public HDF5TimeDurationMDArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<HDF5TimeDurationMDArray> readCallable =
+                new ICallableWithCleanUp<HDF5TimeDurationMDArray>()
+                    {
+                        @Override
+                        public HDF5TimeDurationMDArray call(ICleanUpRegistry registry)
+                        {
+                            final int dataSetId =
+                                    baseReader.h5.openDataSet(baseReader.fileId, objectPath,
+                                            registry);
+                            final HDF5TimeUnit storedUnit =
+                                    baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                            final DataSpaceParameters spaceParams =
+                                    baseReader.getSpaceParameters(dataSetId, offset,
+                                            blockDimensions, registry);
+                            final long[] dataBlock = new long[spaceParams.blockSize];
+                            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_INT64,
+                                    spaceParams.memorySpaceId, spaceParams.dataSpaceId, dataBlock);
+                            return new HDF5TimeDurationMDArray(new MDLongArray(dataBlock,
+                                    blockDimensions), storedUnit);
+                        }
+                    };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath,
+            final HDF5TimeDurationMDArray array, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final HDF5TimeUnit storedUnit =
+                            baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset,
+                                    array.dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    final int[] effectiveBlockDims = MDArray.toInt(spaceParams.dimensions); 
+                    if (array.getUnit() != storedUnit)
+                    {
+                        convertUnit(array.getValues(), storedUnit, array.getUnit(),
+                                effectiveBlockDims, memoryOffset);
+                    }
+                    return effectiveBlockDims;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDurationMDArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final HDF5TimeUnit storedUnit =
+                            baseReader.checkIsTimeDuration(objectPath, dataSetId, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset,
+                                    array.dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_INT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    final int[] effectiveBlockDims = MDArray.toInt(spaceParams.dimensions); 
+                    if (array.getUnit() != storedUnit)
+                    {
+                        convertUnit(array.getValues(), storedUnit, array.getUnit(),
+                                effectiveBlockDims, memoryOffset);
+                    }
+                    return effectiveBlockDims;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    static void convertUnit(MDLongArray array, HDF5TimeUnit fromUnit, HDF5TimeUnit toUnit,
+            int[] dims, int[] offset)
+    {
+        final long[] flatArray = array.getAsFlatArray();
+        // Start at the block offset; idx is advanced index-by-index below.
+        final int[] idx = offset.clone();
+        while (true)
+        {
+            final int linIdx = array.computeIndex(idx);
+            flatArray[linIdx] = toUnit.convert(flatArray[linIdx], fromUnit);
+            if (MatrixUtils.incrementIdx(idx, dims, offset) == false)
+            {
+                break;
+            }
+        }
+    }
+    
+    @Override
+    public Iterable<HDF5DataBlock<HDF5TimeDurationArray>> getArrayNaturalBlocks(
+            final String objectPath) throws HDF5JavaException
+    {
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(objectPath));
+
+        return new Iterable<HDF5DataBlock<HDF5TimeDurationArray>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<HDF5TimeDurationArray>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<HDF5TimeDurationArray>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<HDF5TimeDurationArray> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final HDF5TimeDurationArray block =
+                                        readArrayBlockWithOffset(objectPath, index.getBlockSize(),
+                                                offset);
+                                return new HDF5DataBlock<HDF5TimeDurationArray>(block,
+                                        index.getAndIncIndex(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    public Iterable<HDF5DataBlock<long[]>> getTimeDurationArrayNaturalBlocks(
+            final String objectPath, final HDF5TimeUnit timeUnit) throws HDF5JavaException
+    {
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(objectPath));
+
+        return new Iterable<HDF5DataBlock<long[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<long[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<long[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<long[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final long[] block =
+                                        readTimeDurationArrayBlockWithOffset(objectPath,
+                                                index.getBlockSize(), offset, timeUnit);
+                                return new HDF5DataBlock<long[]>(block, index.getAndIncIndex(),
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<HDF5TimeDurationMDArray>> getMDArrayNaturalBlocks(
+            final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<HDF5TimeDurationMDArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<HDF5TimeDurationMDArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<HDF5TimeDurationMDArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<HDF5TimeDurationMDArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final HDF5TimeDurationMDArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath,
+                                                index.getBlockSize(), offset);
+                                return new HDF5MDDataBlock<HDF5TimeDurationMDArray>(data,
+                                        index.getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    static void convertTimeDurations(final HDF5TimeUnit toTimeUnit,
+            final HDF5TimeUnit fromTimeUnit, final long[] data)
+    {
+        if (toTimeUnit != fromTimeUnit)
+        {
+            for (int i = 0; i < data.length; ++i)
+            {
+                data[i] = toTimeUnit.convert(data[i], fromTimeUnit);
+            }
+        }
+    }
+
+    static HDF5TimeDuration[] convertTimeDurations(final HDF5TimeUnit timeUnit, final long[] data)
+    {
+        final HDF5TimeDuration[] durations = new HDF5TimeDuration[data.length];
+        for (int i = 0; i < data.length; ++i)
+        {
+            durations[i] = new HDF5TimeDuration(data[i], timeUnit);
+        }
+        return durations;
+    }
+
+}
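
getArrayNaturalBlocks(...) above iterates a data set chunk by chunk, which keeps memory bounded for large data sets. A usage sketch, assuming an IHDF5TimeDurationReader has been obtained from an open reader (the wiring through HDF5BaseReader is internal) and that HDF5DataBlock exposes getData()/getIndex()/getOffset() matching the constructor arguments seen above; the path and the process() call are placeholders:

    // 'durationReader' stands for an IHDF5TimeDurationReader of an open file.
    for (HDF5DataBlock<HDF5TimeDurationArray> block : durationReader
            .getArrayNaturalBlocks("/experiment/durations"))
    {
        final HDF5TimeDurationArray data = block.getData();
        // getIndex()/getOffset() locate the block within the data set.
        process(block.getIndex(), block.getOffset(), data.getValues(), data.getUnit());
    }
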
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationWriter.java
new file mode 100644
index 0000000..d0910a7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationWriter.java
@@ -0,0 +1,625 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_INT64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * Implementation of {@link IHDF5TimeDurationWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5TimeDurationWriter extends HDF5TimeDurationReader implements
+        IHDF5TimeDurationWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5TimeDurationWriter(HDF5BaseWriter baseWriter, HDF5LongReader longReader)
+    {
+        super(baseWriter, longReader);
+
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final long timeDuration,
+            final HDF5TimeUnit timeUnit)
+    {
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { 1 }, registry);
+                        baseWriter.setAttribute(objectPath, name, timeUnit.getTypeVariant(),
+                                H5T_STD_I64LE, H5T_NATIVE_INT64, dataSpaceId, new long[]
+                                    { timeDuration }, registry);
+                    } else
+                    {
+                        baseWriter.setAttribute(objectPath, name, timeUnit.getTypeVariant(),
+                                H5T_STD_I64LE, H5T_NATIVE_INT64, -1, new long[]
+                                    { timeDuration }, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
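+
+    // Editor's illustration (a sketch, not part of the upstream source): a typical
+    // call of the attribute setter above, assuming a writer opened via
+    // HDF5Factory.open(...) and that the duration() accessor of IHDF5Writer returns
+    // this writer, with a hypothetical object "/ds":
+    //
+    //     IHDF5Writer writer = HDF5Factory.open("example.h5");
+    //     writer.duration().setAttr("/ds", "timeout", 30, HDF5TimeUnit.SECONDS);
+    //     writer.close();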
+
+    @Override
+    public void setAttr(String objectPath, String name, HDF5TimeDuration timeDuration)
+    {
+        setAttr(objectPath, name, timeDuration.getValue(), timeDuration.getUnit());
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final HDF5TimeDurationArray timeDurations)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert timeDurations != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { timeDurations.timeDurations.length }, registry);
+                        baseWriter.setAttribute(objectPath, name,
+                                timeDurations.timeUnit.getTypeVariant(), H5T_STD_I64LE,
+                                H5T_NATIVE_INT64, dataSpaceId, timeDurations.timeDurations,
+                                registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT64,
+                                        timeDurations.timeDurations.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I64LE,
+                                        timeDurations.timeDurations.length, registry);
+                        baseWriter.setAttribute(objectPath, name,
+                                timeDurations.timeUnit.getTypeVariant(), storageTypeId,
+                                memoryTypeId, -1, timeDurations.timeDurations, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String attributeName,
+            final HDF5TimeDurationMDArray timeDurations)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+        assert timeDurations != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(
+                                        timeDurations.timeDurations.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, attributeName,
+                                timeDurations.timeUnit.getTypeVariant(), H5T_STD_I64LE,
+                                H5T_NATIVE_INT64, dataSpaceId,
+                                timeDurations.timeDurations.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_INT64,
+                                        timeDurations.timeDurations.dimensions(), registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_I64LE,
+                                        timeDurations.timeDurations.dimensions(), registry);
+                        baseWriter.setAttribute(objectPath, attributeName,
+                                timeDurations.timeUnit.getTypeVariant(), storageTypeId,
+                                memoryTypeId, -1, timeDurations.timeDurations.getAsFlatArray(),
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    public void writeTimeDuration(final String objectPath, final long timeDuration)
+    {
+        write(objectPath, timeDuration, HDF5TimeUnit.SECONDS);
+    }
+
+    @Override
+    public void write(final String objectPath, final long timeDuration, final HDF5TimeUnit timeUnit)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> writeScalarRunnable = new ICallableWithCleanUp<Object>()
+            {
+                @Override
+                public Object call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.writeScalar(objectPath, H5T_STD_I64LE, H5T_NATIVE_INT64,
+                                    HDFNativeData.longToByte(timeDuration), true, true, registry);
+                    baseWriter.setTypeVariant(dataSetId, timeUnit.getTypeVariant(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeScalarRunnable);
+    }
+
+    @Override
+    public void write(String objectPath, HDF5TimeDuration timeDuration)
+    {
+        write(objectPath, timeDuration.getValue(), timeDuration.getUnit());
+    }
+
+    @Override
+    public void createArray(String objectPath, int size, HDF5TimeUnit timeUnit)
+    {
+        createArray(objectPath, size, timeUnit, HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5TimeUnit timeUnit)
+    {
+        createArray(objectPath, size, blockSize, timeUnit,
+                HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size, final HDF5TimeUnit timeUnit,
+            final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int dataSetId;
+                    if (features.requiresChunking())
+                    {
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath, H5T_STD_I64LE, features,
+                                        new long[]
+                                            { 0 }, new long[]
+                                            { size }, longBytes, registry);
+                    } else
+                    {
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath, H5T_STD_I64LE, features,
+                                        new long[]
+                                            { size }, null, longBytes, registry);
+                    }
+                    baseWriter.setTypeVariant(dataSetId, timeUnit.getTypeVariant(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
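+
+    // Editor's note (sketch): with features that require chunking, the method above
+    // creates an empty, extendable data set and uses "size" as the chunk size; with
+    // contiguous features, "size" is the fixed, final length. Assuming a hypothetical
+    // path "/durations" (and that the GENERIC_CHUNKED constant exists as named):
+    //
+    //     writer.duration().createArray("/durations", 1024, HDF5TimeUnit.MILLISECONDS,
+    //             HDF5GenericStorageFeatures.GENERIC_CHUNKED);  // starts at length 0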
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5TimeUnit timeUnit, final HDF5GenericStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int dataSetId =
+                            baseWriter.createDataSet(objectPath, H5T_STD_I64LE, features,
+                                    new long[]
+                                        { size }, new long[]
+                                        { blockSize }, longBytes, registry);
+                    baseWriter.setTypeVariant(dataSetId, timeUnit.getTypeVariant(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void writeTimeDurationArray(final String objectPath, final long[] timeDurations)
+    {
+        writeTimeDurationArray(objectPath, timeDurations, HDF5TimeUnit.SECONDS,
+                HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(String objectPath, HDF5TimeDurationArray timeDurations)
+    {
+        writeArray(objectPath, timeDurations, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final HDF5TimeDurationArray timeDurations,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert timeDurations != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int longBytes = 8;
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_I64LE, new long[]
+                                { timeDurations.timeDurations.length }, longBytes, features,
+                                    registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            timeDurations.timeDurations);
+                    baseWriter.setTypeVariant(dataSetId, timeDurations.timeUnit.getTypeVariant(),
+                            registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void writeTimeDurationArray(final String objectPath, final long[] timeDurations,
+            final HDF5TimeUnit timeUnit)
+    {
+        writeTimeDurationArray(objectPath, timeDurations, timeUnit,
+                HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    public void writeTimeDurationArray(final String objectPath, final long[] timeDurations,
+            final HDF5TimeUnit timeUnit, final HDF5IntStorageFeatures features)
+    {
+        writeArray(objectPath, new HDF5TimeDurationArray(timeDurations, timeUnit), features);
+    }
+
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDuration[] timeDurations)
+    {
+        writeTimeDurationArray(objectPath, timeDurations, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDuration[] timeDurations, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert timeDurations != null;
+
+        if (timeDurations.length == 0)
+        {
+            return;
+        }
+        final HDF5TimeDurationArray durations = HDF5TimeDurationArray.create(timeDurations);
+        writeArray(objectPath, durations, features);
+    }
+
+    @Override
+    public void writeArrayBlock(String objectPath, HDF5TimeDurationArray data, long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.getLength(), data.getLength()
+                * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDurationArray data, final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final HDF5TimeUnit storedUnit =
+                            baseWriter.checkIsTimeDuration(objectPath, dataSetId, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            storedUnit.convert(data));
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
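+
+    // Editor's sketch of the block-write pattern above: block number N of an array of
+    // length L lands at element offset N * L, and the data set is extended to
+    // offset + dataSize before the hyperslab is written. Assuming "/durations" was
+    // created as an extendable array with block size 3:
+    //
+    //     HDF5TimeDurationArray block = new HDF5TimeDurationArray(new long[]
+    //         { 1, 2, 3 }, HDF5TimeUnit.SECONDS);
+    //     writer.duration().writeArrayBlock("/durations", block, 0);  // elements 0..2
+    //     writer.duration().writeArrayBlock("/durations", block, 1);  // elements 3..5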
+
+    public void writeTimeDurationArrayBlock(final String objectPath, final long[] data,
+            final long blockNumber, final HDF5TimeUnit timeUnit)
+    {
+        writeTimeDurationArrayBlockWithOffset(objectPath, data, data.length, data.length
+                * blockNumber, timeUnit);
+    }
+
+    public void writeTimeDurationArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset, final HDF5TimeUnit timeUnit)
+    {
+        writeArrayBlockWithOffset(objectPath, new HDF5TimeDurationArray(data, timeUnit), dataSize,
+                offset);
+    }
+
+    public void writeTimeDurationArrayBlock(final String objectPath, final HDF5TimeDuration[] data,
+            final long blockNumber)
+    {
+        writeTimeDurationArrayBlockWithOffset(objectPath, data, data.length, data.length
+                * blockNumber);
+    }
+
+    public void writeTimeDurationArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDuration[] data, final int dataSize, final long offset)
+    {
+        writeArrayBlockWithOffset(objectPath, HDF5TimeDurationArray.create(data), dataSize, offset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final HDF5TimeDurationMDArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath,
+                                    features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE,
+                                    data.longDimensions(), 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    baseWriter.setTypeVariant(dataSetId, data.timeUnit.getTypeVariant(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final HDF5TimeDurationMDArray data)
+    {
+        writeMDArray(objectPath, data, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5TimeUnit timeUnit)
+    {
+        createMDArray(objectPath, dimensions, timeUnit, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5TimeUnit timeUnit)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, timeUnit, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5TimeUnit timeUnit, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId;
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath,
+                                        features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE,
+                                        features, nullDimensions, MDArray.toLong(dimensions), 8,
+                                        registry);
+                    } else
+                    {
+                        dataSetId =
+                                baseWriter.createDataSet(objectPath,
+                                        features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE,
+                                        features, MDArray.toLong(dimensions), null, 8, registry);
+                    }
+                    baseWriter.setTypeVariant(dataSetId, timeUnit.getTypeVariant(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5TimeUnit timeUnit,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.createDataSet(objectPath,
+                                    features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE, features,
+                                    dimensions, MDArray.toLong(blockDimensions), 8, registry);
+                    baseWriter.setTypeVariant(dataSetId, timeUnit.getTypeVariant(), registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final HDF5TimeDurationMDArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDurationMDArray data, final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final HDF5TimeUnit storedUnit =
+                            baseWriter.checkIsTimeDuration(objectPath, dataSetId, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data.getAsFlatArray(storedUnit));
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDurationMDArray data, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId =
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    final HDF5TimeUnit storedUnit =
+                            baseWriter.checkIsTimeDuration(objectPath, dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId =
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_INT64, memorySpaceId, dataSpaceId, H5P_DEFAULT,
+                            data.getAsFlatArray(storedUnit));
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
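+
+    // Editor's note (sketch): this memoryOffset variant writes only a window of the
+    // in-memory array: the block of size blockDimensions starting at memoryOffset in
+    // "data" is selected on the memory dataspace and copied to the block starting at
+    // "offset" in the file dataspace; both selections use setHyperslabBlock above.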
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5TimeUnit.java b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeUnit.java
new file mode 100644
index 0000000..8647c45
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5TimeUnit.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An <code>HDF5TimeUnit</code> represents a unit of a time duration. Each unit corresponds to
+ * one time duration {@link HDF5DataTypeVariant}.
+ * <p>
+ * The conversion of time durations is heavily inspired by Doug Lea's <code>TimeUnit</code> class
+ * in the Java runtime library.
+ * 
+ * @author Bernd Rinn
+ */
+public enum HDF5TimeUnit
+{
+    MICROSECONDS, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS;
+
+    /** Lookup table for type variants. */
+    private static final HDF5DataTypeVariant[] typeVariants =
+        { HDF5DataTypeVariant.TIME_DURATION_MICROSECONDS,
+                HDF5DataTypeVariant.TIME_DURATION_MILLISECONDS,
+                HDF5DataTypeVariant.TIME_DURATION_SECONDS,
+                HDF5DataTypeVariant.TIME_DURATION_MINUTES, HDF5DataTypeVariant.TIME_DURATION_HOURS,
+                HDF5DataTypeVariant.TIME_DURATION_DAYS };
+
+    /** Lookup table for conversion factors (to smaller units). */
+    private static final long[][] multipliers =
+        {
+                // first dimension is the start unit, second is the delta
+                // micro seconds
+                { 1L },
+                // milli seconds
+                { 1L, 1000L },
+                // seconds
+                { 1L, 1000L, 1000L * 1000 },
+                // minutes
+                { 1L, 60L, 60L * 1000, 60L * 1000 * 1000 },
+                // hours
+                { 1L, 60L, 60L * 60, 60L * 60 * 1000, 60L * 60 * 1000 * 1000 },
+                // days
+                { 1L, 24L, 24L * 60, 24L * 60 * 60, 24L * 60 * 60 * 1000L,
+                        24L * 60 * 60 * 1000 * 1000 } };
+
+    /** Lookup table for conversion factors (to larger units). */
+    private static final double[][] divisors =
+        {
+                // first dimension is the start unit, second is the delta
+                // micro seconds
+                { 1.0, 1000.0, 1000.0 * 1000, 1000.0 * 1000 * 60, 1000.0 * 1000 * 60 * 60,
+                        1000.0 * 1000 * 60 * 60 * 24 },
+                // milli seconds
+                { 1.0, 1000.0, 1000.0 * 60, 1000.0 * 60 * 60, 1000.0 * 60 * 60 * 24 },
+                // seconds
+                { 1.0, 60.0, 60.0 * 60, 60 * 60 * 24 },
+                // minutes
+                { 1.0, 60.0, 60.0 * 24 },
+                // hours
+                { 1.0, 24.0 },
+                // days
+                { 1.0 } };
+
+    /**
+     * Lookup table to check saturation: the largest magnitude that can still be multiplied
+     * without overflow. Note that because these thresholds are obtained by dividing
+     * <code>Long.MAX_VALUE</code> down, we don't have to deal with the asymmetry of the MIN/MAX
+     * values.
+     */
+    private static final long[][] overflows =
+        {
+                // first dimension is the start unit, second is the delta
+                // micro seconds
+                { -1 },
+                // milli seconds
+                { -1, Long.MAX_VALUE / 1000L },
+                // seconds
+                { -1, Long.MAX_VALUE / 1000L, Long.MAX_VALUE / (1000L * 1000) },
+                // minutes
+                { -1, Long.MAX_VALUE / 60L, Long.MAX_VALUE / (60L * 1000),
+                        Long.MAX_VALUE / (60L * 1000 * 1000) },
+                // hours
+                { -1, Long.MAX_VALUE / 60L, Long.MAX_VALUE / (60L * 60),
+                        Long.MAX_VALUE / (60L * 60 * 1000),
+                        Long.MAX_VALUE / (60L * 60 * 1000 * 1000) },
+                // days
+                { -1, Long.MAX_VALUE / 24L, Long.MAX_VALUE / (24L * 60),
+                        Long.MAX_VALUE / (24L * 60 * 60), Long.MAX_VALUE / (24L * 60 * 60 * 1000),
+                        Long.MAX_VALUE / (24L * 60 * 60 * 1000 * 1000) } };
+
+    private static long doConvert(int ordinal, int delta, long duration)
+    {
+        if (delta == 0)
+        {
+            return duration;
+        }
+        if (delta < 0)
+        {
+            return Math.round(duration / divisors[ordinal][-delta]);
+        }
+        final long overflow = overflows[ordinal][delta];
+        if (duration > overflow)
+        {
+            return Long.MAX_VALUE;
+        }
+        if (duration < -overflow)
+        {
+            return Long.MIN_VALUE;
+        }
+        return duration * multipliers[ordinal][delta];
+    }
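+
+    // Editor's worked example of the tables above: converting 2 HOURS (ordinal 4) to
+    // SECONDS (ordinal 2) yields delta = 4 - 2 = 2, so the result is
+    // 2 * multipliers[4][2] = 2 * 3600 = 7200. Converting 7200 SECONDS back to HOURS
+    // yields delta = -2, so the result is Math.round(7200 / divisors[2][2]) =
+    // Math.round(7200 / 3600.0) = 2. Saturation kicks in when the duration exceeds
+    // overflows[ordinal][delta] = Long.MAX_VALUE / multipliers[ordinal][delta].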
+
+    /**
+     * Returns the type variant corresponding to this unit.
+     */
+    public HDF5DataTypeVariant getTypeVariant()
+    {
+        return typeVariants[ordinal()];
+    }
+
+    /**
+     * Convert the given time duration in the given unit to this unit. Conversions from smaller to
+     * larger units perform rounding, so they lose precision. Conversions from larger to smaller
+     * units with arguments that would numerically overflow saturate to <code>Long.MIN_VALUE</code>
+     * if negative or <code>Long.MAX_VALUE</code> if positive.
+     * 
+     * @param duration The time duration in the given <code>unit</code>.
+     * @param unit The unit of the <code>duration</code> argument.
+     * @return The converted duration in this unit, or <code>Long.MIN_VALUE</code> if conversion
+     *         would negatively overflow, or <code>Long.MAX_VALUE</code> if it would positively
+     *         overflow.
+     */
+    public long convert(long duration, HDF5TimeUnit unit)
+    {
+        final int currentUnitOrdinal = unit.ordinal();
+        return doConvert(currentUnitOrdinal, currentUnitOrdinal - ordinal(), duration);
+    }
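+
+    // Editor's illustration of typical conversions, including the saturation
+    // behavior documented above:
+    //
+    //     HDF5TimeUnit.SECONDS.convert(2, HDF5TimeUnit.HOURS);      // 7200
+    //     HDF5TimeUnit.HOURS.convert(7199, HDF5TimeUnit.SECONDS);   // rounds to 2
+    //     HDF5TimeUnit.MICROSECONDS.convert(Long.MAX_VALUE / 2,
+    //             HDF5TimeUnit.DAYS);                               // Long.MAX_VALUE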
+
+    /**
+     * Convert the given time <var>durations</var> from their own time unit to this unit.
+     * Conversions from smaller to larger units perform rounding, so they lose precision.
+     * Conversions from larger to smaller units with arguments that would numerically overflow
+     * saturate to <code>Long.MIN_VALUE</code> if negative or <code>Long.MAX_VALUE</code> if
+     * positive.
+     * 
+     * @param durations The time durations.
+     * @return The converted durations in this unit, where each element is
+     *         <code>Long.MIN_VALUE</code> if its conversion would negatively overflow, or
+     *         <code>Long.MAX_VALUE</code> if it would positively overflow.
+     */
+    public long[] convert(final HDF5TimeDurationArray durations)
+    {
+        if (this != durations.timeUnit)
+        {
+            final long[] convertedData = new long[durations.timeDurations.length];
+            for (int i = 0; i < durations.timeDurations.length; ++i)
+            {
+                convertedData[i] = this.convert(durations.timeDurations[i], durations.timeUnit);
+            }
+            return convertedData;
+        } else
+        {
+            return durations.timeDurations;
+        }
+    }
+
+    /**
+     * Convert the given time <var>durations</var> from their own time unit to this unit.
+     * Conversions from smaller to larger units perform rounding, so they lose precision.
+     * Conversions from larger to smaller units with arguments that would numerically overflow
+     * saturate to <code>Long.MIN_VALUE</code> if negative or <code>Long.MAX_VALUE</code> if
+     * positive.
+     * 
+     * @param durations The time durations.
+     * @return The converted durations in this unit, where each element is
+     *         <code>Long.MIN_VALUE</code> if its conversion would negatively overflow, or
+     *         <code>Long.MAX_VALUE</code> if it would positively overflow.
+     */
+    public HDF5TimeDurationMDArray convert(final HDF5TimeDurationMDArray durations)
+    {
+        if (this != durations.timeUnit)
+        {
+            final long[] originalData = durations.getAsFlatArray();
+            final long[] convertedData = new long[originalData.length];
+            for (int i = 0; i < originalData.length; ++i)
+            {
+                convertedData[i] = this.convert(originalData[i], durations.timeUnit);
+            }
+            return new HDF5TimeDurationMDArray(convertedData, durations.dimensions(), this);
+        } else
+        {
+            return durations;
+        }
+    }
+
+    /**
+     * Convert the given time <var>durations</var> in the given time <var>unit</var> to this unit.
+     * Conversions from smaller to larger units perform rounding, so they lose precision.
+     * Conversions from larger to smaller units with arguments that would numerically overflow
+     * saturate to <code>Long.MIN_VALUE</code> if negative or <code>Long.MAX_VALUE</code> if
+     * positive.
+     * 
+     * @param durations The time durations in the given <code>unit</code>.
+     * @param unit The unit of the <code>durations</code> argument.
+     * @return The converted durations in this unit, where each element is
+     *         <code>Long.MIN_VALUE</code> if its conversion would negatively overflow, or
+     *         <code>Long.MAX_VALUE</code> if it would positively overflow.
+     */
+    public long[] convert(final long[] durations, final HDF5TimeUnit unit)
+    {
+        if (this != unit)
+        {
+            final long[] convertedData = new long[durations.length];
+            for (int i = 0; i < durations.length; ++i)
+            {
+                convertedData[i] = this.convert(durations[i], unit);
+            }
+            return convertedData;
+        } else
+        {
+            return durations;
+        }
+    }
+
+    /**
+     * Convert the given time duration in the given unit to this unit. Conversions from smaller to
+     * larger units perform rounding, so they lose precision. Conversions from larger to smaller
+     * units with arguments that would numerically overflow saturate to <code>Long.MIN_VALUE</code>
+     * if negative or <code>Long.MAX_VALUE</code> if positive.
+     * 
+     * @param duration The time duration and its unit.
+     * @return The converted duration in this unit, or <code>Long.MIN_VALUE</code> if conversion
+     *         would negatively overflow, or <code>Long.MAX_VALUE</code> if it would positively
+     *         overflow.
+     */
+    public long convert(HDF5TimeDuration duration)
+    {
+        return convert(duration.getValue(), duration.getUnit());
+    }
+
+    /**
+     * Convert the given time <var>durations</var> to this unit. Conversions from smaller to larger
+     * units perform rounding, so they lose precision. Conversions from larger to smaller units with
+     * arguments that would numerically overflow saturate to <code>Long.MIN_VALUE</code> if negative
+     * or <code>Long.MAX_VALUE</code> if positive.
+     * 
+     * @return The converted durations in this unit, where each element is
+     *         <code>Long.MIN_VALUE</code> if its conversion would negatively overflow, or
+     *         <code>Long.MAX_VALUE</code> if it would positively overflow.
+     */
+    public long[] convert(final HDF5TimeDuration[] durations)
+    {
+        final long[] convertedData = new long[durations.length];
+        for (int i = 0; i < durations.length; ++i)
+        {
+            convertedData[i] = this.convert(durations[i]);
+        }
+        return convertedData;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedByteReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedByteReader.java
new file mode 100644
index 0000000..a3ea73c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedByteReader.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT8;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5ByteReader} for unsigned byte values.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedByteReader implements IHDF5ByteReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5UnsignedByteReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For Unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public byte getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Byte> getAttributeRunnable = new ICallableWithCleanUp<Byte>()
+            {
+                @Override
+                public Byte call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final byte[] data =
+                            baseReader.h5.readAttributeAsByteArray(attributeId, H5T_NATIVE_UINT8, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
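+
+    // Editor's note (sketch): Java has no unsigned byte type, so values above 127 read
+    // through H5T_NATIVE_UINT8 come back as negative Java bytes. Callers typically
+    // mask to recover the unsigned value, e.g. (assuming the uint8() accessor of
+    // IHDF5Reader returns this reader, with a hypothetical object "/ds"):
+    //
+    //     byte raw = reader.uint8().getAttr("/ds", "level");
+    //     int unsigned = raw & 0xff;  // 0..255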
+
+    @Override
+    public byte[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> getAttributeRunnable =
+                new ICallableWithCleanUp<byte[]>()
+                    {
+                        @Override
+                        public byte[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getByteArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDByteArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDByteArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDByteArray>()
+                    {
+                        @Override
+                        public MDByteArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getByteMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public byte[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDByteArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public byte read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Byte> readCallable = new ICallableWithCleanUp<Byte>()
+            {
+                @Override
+                public Byte call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final byte[] data = new byte[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT8, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readByteArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private byte[] readByteArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final byte[] data = new byte[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT8, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readByteArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private byte[] readByteArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final byte[] data = new byte[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT8, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDByteArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT8, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDByteArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT8, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public byte[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<byte[]> readCallable = new ICallableWithCleanUp<byte[]>()
+            {
+                @Override
+                public byte[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final byte[] data = new byte[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT8, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public byte[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDByteArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public byte[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDByteArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public byte[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDByteArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDByteArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDByteArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDByteArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDByteArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDByteArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDByteArray> readCallable = new ICallableWithCleanUp<MDByteArray>()
+            {
+                @Override
+                public MDByteArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readByteMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDByteArray readByteMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final byte[] data = new byte[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT8, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDByteArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readByteMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDByteArray readByteMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT8, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final byte[] data = new byte[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDByteArray(data, arrayDimensions);
+        } else
+        {
+            final byte[] data =
+                    new byte[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDByteArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDByteArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
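+        // A block number addresses a block-aligned region: along each dimension,
+        // block i starts at element offset i * blockDimensions[i].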
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDByteArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDByteArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDByteArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDByteArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDByteArray> readCallable = new ICallableWithCleanUp<MDByteArray>()
+            {
+                @Override
+                public MDByteArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final byte[] dataBlock = new byte[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT8,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDByteArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
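+                        // The data set may use an HDF5 array data type, whose element
+                        // rank is part of the type rather than the data space. If the
+                        // missing ranks equal the array type's rank, retry reading with
+                        // the space dimensions only.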
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDByteArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array types is not supported: each array-typed
+        // element can only be read whole, so check that the requested block
+        // covers the full array dimensions and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final byte[] dataBlock =
+                new byte[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT8, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDByteArray(dataBlock, effectiveBlockDimensions);
+    }
+
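+    // A minimal usage sketch for natural-block iteration (hypothetical data set
+    // name "/ds" and consumer process()):
+    //   for (HDF5DataBlock<byte[]> block : getArrayNaturalBlocks("/ds"))
+    //   {
+    //       process(block.getData(), block.getOffset());
+    //   }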
+    @Override
+    public Iterable<HDF5DataBlock<byte[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<byte[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<byte[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<byte[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<byte[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final byte[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<byte[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDByteArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDByteArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDByteArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDByteArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDByteArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDByteArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDByteArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
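+    // Attribute values may be stored with an HDF5 array member type or with a
+    // simple rank-1 data space; both layouts are normalized to a plain byte[]
+    // by the helper below.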
+    byte[] getByteArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_UINT8, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_UINT8;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final byte[] data =
+                baseReader.h5.readAttributeAsByteArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDByteArray getByteMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_UINT8,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_UINT8;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final byte[] data =
+                    baseReader.h5.readAttributeAsByteArray(attributeId,
+                            memoryTypeId, len);
+            return new MDByteArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedByteWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedByteWriter.java
new file mode 100644
index 0000000..3df77a2
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedByteWriter.java
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT8;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U8LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5ByteWriter}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedByteWriter extends HDF5UnsignedByteReader implements IHDF5ByteWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5UnsignedByteWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final byte value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
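+                            // When configured, write the scalar attribute through a
+                            // rank-1 data space of length 1; otherwise a data space id
+                            // of -1 requests the default scalar attribute layout.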
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U8LE,
+                                        H5T_NATIVE_UINT8, dataSpaceId, new byte[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U8LE,
+                                        H5T_NATIVE_UINT8, -1, new byte[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final byte[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U8LE, H5T_NATIVE_UINT8,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT8, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U8LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDByteArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U8LE, H5T_NATIVE_UINT8,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT8, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U8LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final byte[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDByteArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final byte value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_U8LE, H5T_NATIVE_UINT8, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final byte[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final byte[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                H5T_STD_U8LE, new long[]
+                                { data.length }, 1, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT8, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U8LE, 
+                            features, new long[] { 0 }, new long[] { size }, 1, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U8LE, 
+                            features, new long[] { size }, null, 1, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U8LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 1, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final byte[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final byte[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
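+                    // Extend the data set, if necessary, to hold offset + dataSize
+                    // elements, then write the block through a hyperslab selection
+                    // starting at 'offset'.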
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT8, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final byte[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
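+
+    // A minimal usage sketch, assuming the JHDF5 factory API (HDF5Factory.open)
+    // and a hypothetical file name; uint8() yields an IHDF5ByteWriter like this
+    // class:
+    //   IHDF5Writer writer = HDF5Factory.open("example.h5");
+    //   writer.uint8().writeMatrix("/matrix", new byte[][] { { 1, 2 }, { 3, 4 } });
+    //   writer.close();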
+
+    @Override
+    public void writeMatrix(final String objectPath, final byte[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDByteArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final byte[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDByteArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final byte[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final byte[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDByteArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDByteArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
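+        // Expand the slice to the data set's full rank: bound indices become
+        // singleton dimensions at their fixed coordinates, while free indices
+        // keep the slice's dimensions.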
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDByteArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDByteArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_U8LE, 
+                                    data.longDimensions(), 1, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT8, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_STD_U8LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 1, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U8LE, 
+                                features, MDArray.toLong(dimensions), null, 1, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U8LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 1, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDByteArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDByteArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDByteArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDByteArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
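+                    // Grow each dimension to offset + dimension where necessary, then
+                    // write the block through a file-side hyperslab selection.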
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT8, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDByteArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDByteArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
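+                    // Select matching hyperslabs in both the file space (at 'offset')
+                    // and the memory space (at 'memoryOffset') so that only the
+                    // requested window of 'data' is transferred.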
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT8, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedIntReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedIntReader.java
new file mode 100644
index 0000000..ff2c9ab
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedIntReader.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT32;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5IntReader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedIntReader implements IHDF5IntReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5UnsignedIntReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public int getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Integer> getAttributeRunnable = new ICallableWithCleanUp<Integer>()
+            {
+                @Override
+                public Integer call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final int[] data =
+                            baseReader.h5.readAttributeAsIntArray(attributeId, H5T_NATIVE_UINT32, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public int[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> getAttributeRunnable =
+                new ICallableWithCleanUp<int[]>()
+                    {
+                        @Override
+                        public int[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getIntArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDIntArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDIntArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDIntArray>()
+                    {
+                        @Override
+                        public MDIntArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getIntMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public int[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDIntArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public int read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Integer> readCallable = new ICallableWithCleanUp<Integer>()
+            {
+                @Override
+                public Integer call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final int[] data = new int[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT32, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readIntArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private int[] readIntArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final int[] data = new int[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT32, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readIntArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private int[] readIntArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
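+        // The whole int[] is stored as a single HDF5 array-type element, so it
+        // is read through a scalar data space using an array memory type.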
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int[] data = new int[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT32, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDIntArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
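+                    // Read the data set into 'array' starting at 'memoryOffset'; the
+                    // data set's dimensions are returned so the caller knows which
+                    // part of 'array' was filled.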
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT32, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDIntArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT32, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public int[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final int[] data = new int[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT32, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDIntArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public int[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDIntArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public int[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDIntArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
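+    // Slicing binds selected indices of a rank-N data set to fixed values
+    // and reads the remaining free dimensions in full. Sketch (a rank-3
+    // data set "/cube" and the fluent IndexMap.bind() call are assumed):
+    //
+    //   // the rank-2 slice of "/cube" at index 4 of dimension 0:
+    //   MDIntArray slice =
+    //           reader.uint32().readMDArraySlice("/cube", new IndexMap().bind(0, 4));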
+    @Override
+    public MDIntArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDIntArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDIntArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDIntArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDIntArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDIntArray> readCallable = new ICallableWithCleanUp<MDIntArray>()
+            {
+                @Override
+                public MDIntArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readIntMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDIntArray readIntMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final int[] data = new int[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT32, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDIntArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readIntMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
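+    // Fallback for data sets whose element type is itself an HDF5 array
+    // type: the array-type dimensions are appended to the data space
+    // dimensions, so a data set with space [n] and element type int[m]
+    // comes back as an MDIntArray of dimensions [n, m] (just [m] for a
+    // scalar space).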
+    private MDIntArray readIntMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT32, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final int[] data = new int[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDIntArray(data, arrayDimensions);
+        } else
+        {
+            final int[] data =
+                    new int[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDIntArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDIntArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
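+    // A sliced block combines both mechanisms: the bound indices pin some
+    // dimensions while blockDimensions/offset select a block within the
+    // remaining free ones. Sketch (assumptions as above):
+    //
+    //   MDIntArray block = reader.uint32().readSlicedMDArrayBlockWithOffset(
+    //           "/cube", new int[] { 5, 5 }, new long[] { 10, 0 },
+    //           new IndexMap().bind(0, 4));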
+    @Override
+    public MDIntArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDIntArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDIntArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDIntArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
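+    // Reads a block addressed by absolute element offsets rather than block
+    // numbers. Sketch (assumptions as above; "/matrix" is rank 2):
+    //
+    //   // a 10x20 block anchored at element (5, 0):
+    //   MDIntArray block = reader.uint32().readMDArrayBlockWithOffset(
+    //           "/matrix", new int[] { 10, 20 }, new long[] { 5, 0 });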
+    @Override
+    public MDIntArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDIntArray> readCallable = new ICallableWithCleanUp<MDIntArray>()
+            {
+                @Override
+                public MDIntArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final int[] dataBlock = new int[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT32,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDIntArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDIntArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array types is not supported; check that it
+        // is not required here and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final int[] dataBlock =
+                new int[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT32, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDIntArray(dataBlock, effectiveBlockDimensions);
+    }
+
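+    // Natural blocks follow the data set's own storage layout (the chunk
+    // size for chunked data sets, the full extent otherwise), so iterating
+    // over them reads every stored block exactly once. Sketch (assumptions
+    // as above; process() is a hypothetical consumer):
+    //
+    //   for (HDF5DataBlock<int[]> b : reader.uint32().getArrayNaturalBlocks("/ds"))
+    //   {
+    //       process(b.getData(), b.getOffset());
+    //   }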
+    @Override
+    public Iterable<HDF5DataBlock<int[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<int[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<int[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<int[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<int[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final int[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<int[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDIntArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDIntArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDIntArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDIntArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDIntArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDIntArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDIntArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    int[] getIntArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_UINT32, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_UINT32;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final int[] data =
+                baseReader.h5.readAttributeAsIntArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDIntArray getIntMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_UINT32,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_UINT32;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final int[] data =
+                    baseReader.h5.readAttributeAsIntArray(attributeId,
+                            memoryTypeId, len);
+            return new MDIntArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedIntWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedIntWriter.java
new file mode 100644
index 0000000..764760a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedIntWriter.java
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT32;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U32LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5IntWriter} for unsigned integer values.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedIntWriter extends HDF5UnsignedIntReader implements IHDF5IntWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5UnsignedIntWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
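+    // Attributes are stored as H5T_STD_U32LE and transferred through the
+    // native type H5T_NATIVE_UINT32. A usage sketch (an open IHDF5Writer
+    // 'writer' and the uint32() facade accessor of the 14.x API are
+    // assumed):
+    //
+    //   writer.uint32().setAttr("/ds", "version", 3);
+    //   writer.uint32().setArrayAttr("/ds", "histogram", new int[] { 1, 2, 3 });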
+    @Override
+    public void setAttr(final String objectPath, final String name, final int value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U32LE,
+                                        H5T_NATIVE_UINT32, dataSpaceId, new int[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U32LE,
+                                        H5T_NATIVE_UINT32, -1, new int[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final int[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U32LE, H5T_NATIVE_UINT32,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT32, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U32LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDIntArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U32LE, H5T_NATIVE_UINT32,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT32, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U32LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final int[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDIntArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
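+    // Scalars and whole arrays are one-shot writes; the element size passed
+    // to the base writer is 4 bytes, matching uint32. Sketch (assumptions
+    // as above):
+    //
+    //   writer.uint32().write("/answer", 42);
+    //   writer.uint32().writeArray("/data", new int[] { 1, 2, 3, 4 });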
+    @Override
+    public void write(final String objectPath, final int value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_U32LE, H5T_NATIVE_UINT32, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final int[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final int[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                H5T_STD_U32LE, new long[]
+                                { data.length }, 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT32, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U32LE, 
+                            features, new long[] { 0 }, new long[] { size }, 4, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U32LE, 
+                            features, new long[] { size }, null, 4, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U32LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 4, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final int[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
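+    // Block-wise writing (by block number above, by absolute offset below)
+    // presupposes an extendable, i.e. chunked, data set. Sketch (assumptions
+    // as above; block0 and block1 are hypothetical int[1000] arrays):
+    //
+    //   writer.uint32().createArray("/big", 0, 1000);        // chunk size 1000
+    //   writer.uint32().writeArrayBlock("/big", block0, 0);  // elements 0..999
+    //   writer.uint32().writeArrayBlock("/big", block1, 1);  // elements 1000..1999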
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final int[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT32, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final int[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final int[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDIntArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final int[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDIntArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final int[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final int[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDIntArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
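+    // Writing a slice is the inverse of reading one: the bound indices pin
+    // some dimensions of the existing data set, the data array fills the
+    // free ones. Sketch (a rank-3 data set "/cube" and the fluent
+    // IndexMap.bind() call are assumed):
+    //
+    //   MDIntArray slice = new MDIntArray(new int[] { 10, 20 });
+    //   writer.uint32().writeMDArraySlice("/cube", slice, new IndexMap().bind(0, 4));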
+    @Override
+    public void writeMDArraySlice(String objectPath, MDIntArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDIntArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDIntArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_U32LE, 
+                                    data.longDimensions(), 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT32, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_STD_U32LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 4, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U32LE, 
+                                features, MDArray.toLong(dimensions), null, 4, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U32LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 4, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDIntArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDIntArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDIntArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDIntArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT32, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDIntArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDIntArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT32, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedLongReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedLongReader.java
new file mode 100644
index 0000000..8205358
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedLongReader.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT64;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5LongReader} for unsigned long values.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedLongReader implements IHDF5LongReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5UnsignedLongReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
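+    // Values are read with the native type H5T_NATIVE_UINT64 but surface as
+    // Java's signed long, so stored values above 2^63 - 1 appear negative
+    // while keeping the correct two's-complement bit pattern. Sketch (an
+    // open reader and an assumed uint64() facade accessor):
+    //
+    //   long raw = reader.uint64().getAttr("/ds", "count");
+    //   // on Java 8+, Long.toUnsignedString(raw) renders the unsigned value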
+    @Override
+    public long getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Long> getAttributeRunnable = new ICallableWithCleanUp<Long>()
+            {
+                @Override
+                public Long call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final long[] data =
+                            baseReader.h5.readAttributeAsLongArray(attributeId, H5T_NATIVE_UINT64, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public long[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> getAttributeRunnable =
+                new ICallableWithCleanUp<long[]>()
+                    {
+                        @Override
+                        public long[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getLongArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDLongArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDLongArray>()
+                    {
+                        @Override
+                        public MDLongArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getLongMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public long[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDLongArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public long read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Long> readCallable = new ICallableWithCleanUp<Long>()
+            {
+                @Override
+                public Long call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final long[] data = new long[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readLongArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
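+    /**
+     * Reads the data set as a rank-1 <code>long</code> array. If the read fails because the data
+     * set uses an HDF5 array data type, falls back to reading it via the array type.
+     */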
+    private long[] readLongArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final long[] data = new long[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readLongArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
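+    /**
+     * Reads a scalar data set whose element type is an HDF5 array type, returning the elements as
+     * a flat <code>long</code> array.
+     */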
+    private long[] readLongArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final long[] data = new long[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT64, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDLongArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDLongArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT64, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public long[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<long[]> readCallable = new ICallableWithCleanUp<long[]>()
+            {
+                @Override
+                public long[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final long[] data = new long[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public long[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDLongArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public long[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDLongArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public long[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDLongArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDLongArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDLongArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDLongArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDLongArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDLongArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+            {
+                @Override
+                public MDLongArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readLongMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
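+    /**
+     * Reads the data set as a multi-dimensional <code>long</code> array. If the read fails
+     * because the data set uses an HDF5 array data type, falls back to reading it via the array
+     * type.
+     */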
+    MDLongArray readLongMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final long[] data = new long[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDLongArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readLongMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
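+    /**
+     * Reads a data set whose element type is an HDF5 array type. For a scalar data space, the
+     * dimensions of the array type become the dimensions of the result; otherwise the data space
+     * dimensions and the array type dimensions are concatenated.
+     */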
+    private MDLongArray readLongMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT64, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final long[] data = new long[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDLongArray(data, arrayDimensions);
+        } else
+        {
+            final long[] data =
+                    new long[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDLongArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDLongArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDLongArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDLongArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDLongArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDLongArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDLongArray> readCallable = new ICallableWithCleanUp<MDLongArray>()
+            {
+                @Override
+                public MDLongArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final long[] dataBlock = new long[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT64,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDLongArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
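+    /**
+     * Reads a block of a data set whose element type is an HDF5 array type. The trailing block
+     * dimensions must match the array type dimensions exactly, as array type elements can only be
+     * read as a whole.
+     */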
+    private MDLongArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array types is not supported; check that it is not
+        // required here and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final long[] dataBlock =
+                new long[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT64, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDLongArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<long[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<long[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<long[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<long[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<long[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final long[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<long[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDLongArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDLongArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDLongArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDLongArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDLongArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDLongArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
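+    /**
+     * Reads a rank-1 <code>long</code> attribute, handling both attributes stored with an HDF5
+     * array data type and attributes stored with a simple data space.
+     */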
+    long[] getLongArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_UINT64, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_UINT64;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final long[] data =
+                baseReader.h5.readAttributeAsLongArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
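+    /**
+     * Reads a multi-dimensional <code>long</code> attribute, handling both attributes stored
+     * with an HDF5 array data type and attributes stored with a simple data space.
+     */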
+    MDLongArray getLongMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_UINT64,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_UINT64;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final long[] data =
+                    baseReader.h5.readAttributeAsLongArray(attributeId,
+                            memoryTypeId, len);
+            return new MDLongArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedLongWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedLongWriter.java
new file mode 100644
index 0000000..64c7d3d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedLongWriter.java
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT64;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * An implementation of {@link IHDF5LongWriter} that stores values as unsigned 64-bit integers
+ * (<code>H5T_STD_U64LE</code>).
+ * 
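+ * <p>
+ * A minimal usage sketch (assuming the <code>uint64()</code> accessor of the
+ * <code>IHDF5Writer</code> facade; instances of this class are not constructed directly):
+ *
+ * <pre>
+ * IHDF5Writer writer = HDF5Factory.open("example.h5");
+ * writer.uint64().writeArray("/ds", new long[] { 1L, 2L, 3L });
+ * writer.close();
+ * </pre>
+ *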
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedLongWriter extends HDF5UnsignedLongReader implements IHDF5LongWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5UnsignedLongWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final long value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U64LE,
+                                        H5T_NATIVE_UINT64, dataSpaceId, new long[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U64LE,
+                                        H5T_NATIVE_UINT64, -1, new long[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final long[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U64LE, H5T_NATIVE_UINT64,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT64, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U64LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDLongArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U64LE, H5T_NATIVE_UINT64,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT64, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U64LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final long[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDLongArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final long value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_U64LE, H5T_NATIVE_UINT64, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final long[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final long[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                H5T_STD_U64LE, new long[]
+                                { data.length }, 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
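+                        // Chunked storage requested: create the data set with zero extent
+                        // and the requested size as the block (chunk) dimensions.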
+                        baseWriter.createDataSet(objectPath, H5T_STD_U64LE, 
+                            features, new long[] { 0 }, new long[] { size }, 8, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U64LE, 
+                            features, new long[] { size }, null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U64LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final long[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT64, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final long[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final long[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDLongArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final long[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDLongArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final long[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final long[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDLongArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDLongArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDLongArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDLongArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_U64LE, 
+                                    data.longDimensions(), 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_STD_U64LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 8, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U64LE, 
+                                features, MDArray.toLong(dimensions), null, 8, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U64LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 8, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDLongArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDLongArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDLongArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
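+                    // The data set needs to be large enough to hold the block at the given
+                    // offset; openAndExtendDataSet extends it if necessary.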
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT64, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDLongArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
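+                    // Select the block within the (possibly larger) in-memory array.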
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT64, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedShortReader.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedShortReader.java
new file mode 100644
index 0000000..61b2820
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedShortReader.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT16;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5ShortReader} for unsigned <code>short</code>
+ * (<code>uint16</code>) values.
+ * 
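+ * <p>
+ * A minimal usage sketch (hedged: it assumes the <code>uint16()</code> accessor on
+ * {@link IHDF5Reader} and uses a hypothetical file and data set name):
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+ * short[] data = reader.uint16().readArray("/some/dataSet");
+ * reader.close();
+ * </pre>
+ * 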
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedShortReader implements IHDF5ShortReader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5UnsignedShortReader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For Unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public short getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Short> getAttributeRunnable = new ICallableWithCleanUp<Short>()
+            {
+                @Override
+                public Short call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final short[] data =
+                            baseReader.h5.readAttributeAsShortArray(attributeId, H5T_NATIVE_UINT16, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public short[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<short[]> getAttributeRunnable =
+                new ICallableWithCleanUp<short[]>()
+                    {
+                        @Override
+                        public short[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getShortArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MDShortArray getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDShortArray> getAttributeRunnable =
+                new ICallableWithCleanUp<MDShortArray>()
+                    {
+                        @Override
+                        public MDShortArray call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return getShortMDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public short[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MDShortArray array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public short read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<Short> readCallable = new ICallableWithCleanUp<Short>()
+            {
+                @Override
+                public Short call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final short[] data = new short[1];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT16, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public short[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<short[]> readCallable = new ICallableWithCleanUp<short[]>()
+            {
+                @Override
+                public short[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readShortArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private short[] readShortArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final short[] data = new short[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT16, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether the data set has an HDF5 array data type rather than a
+                // plain integer type; if so, retry with a matching array memory type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readShortArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private short[] readShortArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
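+        // A data set with an HDF5 array *type* is scalar from the data space's point of
+        // view: a single element whose type contains the whole array. Read it through a
+        // scalar space with an in-memory array type of the same length.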
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final short[] data = new short[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT16, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MDShortArray array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT16, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MDShortArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, H5T_NATIVE_UINT16, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public short[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public short[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<short[]> readCallable = new ICallableWithCleanUp<short[]>()
+            {
+                @Override
+                public short[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final short[] data = new short[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT16, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public short[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MDShortArray array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public short[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MDShortArray array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public short[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MDShortArray array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MDShortArray readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
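+        // Bound indices pin some dimensions to fixed coordinates; the remaining (free)
+        // dimensions make up the returned slice. The -1 entries below are placeholders
+        // that createFullBlockDimensionsAndOffset replaces with the data set's extents.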
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDShortArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDShortArray readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MDShortArray(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MDShortArray readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDShortArray> readCallable = new ICallableWithCleanUp<MDShortArray>()
+            {
+                @Override
+                public MDShortArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return readShortMDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MDShortArray readShortMDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final short[] data = new short[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT16, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDShortArray(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether the data set has an HDF5 array data type rather than a
+                // plain integer type; if so, retry with a matching array memory type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return readShortMDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MDShortArray readShortMDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT16, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final short[] data = new short[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MDShortArray(data, arrayDimensions);
+        } else
+        {
+            final short[] data =
+                    new short[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MDShortArray(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MDShortArray readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
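+        // Block numbers count whole blocks: along axis i, block b starts at element
+        // b * blockDimensions[i] (e.g. block 2 with block size 10 starts at element 20).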
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDShortArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MDShortArray result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MDShortArray(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MDShortArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MDShortArray> readCallable = new ICallableWithCleanUp<MDShortArray>()
+            {
+                @Override
+                public MDShortArray call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final short[] dataBlock = new short[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, H5T_NATIVE_UINT16,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MDShortArray(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MDShortArray readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array type data sets is not supported; check that the
+        // requested block covers each array dimension completely and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final short[] dataBlock =
+                new short[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(H5T_NATIVE_UINT16, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MDShortArray(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<short[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
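+        // "Natural" blocks follow the data set's own layout: the chunk size for chunked
+        // data sets, or the full extent for contiguous ones, so each block is read once.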
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<short[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<short[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<short[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<short[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final short[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<short[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MDShortArray>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MDShortArray>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MDShortArray>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MDShortArray>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MDShortArray> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MDShortArray data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MDShortArray>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    short[] getShortArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
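+        // An array attribute may be stored either with an HDF5 array *type* (only rank 1
+        // is accepted here) or as a simple data space of uint16 elements; support both.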
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(H5T_NATIVE_UINT16, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = H5T_NATIVE_UINT16;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final short[] data =
+                baseReader.h5.readAttributeAsShortArray(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MDShortArray getShortMDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(H5T_NATIVE_UINT16,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = H5T_NATIVE_UINT16;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final short[] data =
+                    baseReader.h5.readAttributeAsShortArray(attributeId,
+                            memoryTypeId, len);
+            return new MDShortArray(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedShortWriter.java b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedShortWriter.java
new file mode 100644
index 0000000..5137a5e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5UnsignedShortWriter.java
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_UINT16;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U16LE;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5ShortWriter} for unsigned <code>short</code>
+ * (<code>uint16</code>) values.
+ * 
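+ * <p>
+ * A minimal usage sketch (hedged: it assumes the <code>uint16()</code> accessor on
+ * {@link IHDF5Writer} and uses a hypothetical file and data set name):
+ * 
+ * <pre>
+ * IHDF5Writer writer = HDF5Factory.open("example.h5");
+ * writer.uint16().writeArray("/some/dataSet", new short[] { 1, 2, 3 });
+ * writer.close();
+ * </pre>
+ * 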
+ * @author Bernd Rinn
+ */
+class HDF5UnsignedShortWriter extends HDF5UnsignedShortReader implements IHDF5ShortWriter
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5UnsignedShortWriter(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final short value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
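+                            // The attribute can be attached via an explicit rank-1 data
+                            // space of length 1, or with -1 as the data space id, in which
+                            // case the base writer picks the data space itself.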
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U16LE,
+                                        H5T_NATIVE_UINT16, dataSpaceId, new short[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, H5T_STD_U16LE,
+                                        H5T_NATIVE_UINT16, -1, new short[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final short[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U16LE, H5T_NATIVE_UINT16,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT16, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U16LE, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDShortArray value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, H5T_STD_U16LE, H5T_NATIVE_UINT16,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(H5T_NATIVE_UINT16, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(H5T_STD_U16LE, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final short[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MDShortArray(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final short value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, H5T_STD_U16LE, H5T_NATIVE_UINT16, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final short[] data)
+    {
+        writeArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final short[] data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                H5T_STD_U16LE, new long[]
+                                { data.length }, 2, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT16, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
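+                    // For chunked storage the data set is created empty (extent 0) with
+                    // 'size' as the chunk size; otherwise the full extent is allocated
+                    // up front and no chunk dimensions are passed.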
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U16LE, 
+                            features, new long[] { 0 }, new long[] { size }, 2, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U16LE, 
+                            features, new long[] { size }, null, 2, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U16LE, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, 2, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final short[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final short[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
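+                    // Extend the data set (if necessary) to cover offset + dataSize, then
+                    // write the block as a hyperslab selection starting at 'offset'.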
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT16, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
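+     * <p>
+     * Example (an illustrative sketch; the data set path is hypothetical):
+     * 
+     * <pre>
+     * writer.writeMatrix("/some/matrix", new short[][]
+     *     {
+     *         { 1, 2, 3 },
+     *         { 4, 5, 6 } });
+     * </pre>
+     * 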
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final short[][] data)
+    {
+        writeMatrix(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final short[][] data, 
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MDShortArray(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final short[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MDShortArray(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final short[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final short[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDShortArray data)
+    {
+        writeMDArray(objectPath, data, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDShortArray data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MDShortArray data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MDShortArray data,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_STD_U16LE, 
+                                    data.longDimensions(), 2, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT16, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, INT_NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, H5T_STD_U16LE, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), 2, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, H5T_STD_U16LE, 
+                                features, MDArray.toLong(dimensions), null, 2, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, H5T_STD_U16LE, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), 2, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MDShortArray data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
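+        // As on the reader side, block numbers count whole blocks: the element offset
+        // along axis i is blockNumber[i] * dimensions[i].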
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MDShortArray data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MDShortArray data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDShortArray data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
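+                    // Extend the data set (if necessary) so that it covers offset + dimensions,
+                    // then select the target block as a hyperslab of the file data space and
+                    // write the flat array through a memory data space of the block's shape.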
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT16, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MDShortArray(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MDShortArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
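+                    // Select matching hyperslabs in both the file space and the memory space so
+                    // that only the blockDimensions-sized sub-block of 'data' starting at
+                    // memoryOffset gets written.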
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, H5T_NATIVE_UINT16, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5Utils.java b/source/java/ch/systemsx/cisd/hdf5/HDF5Utils.java
new file mode 100644
index 0000000..0f6efb0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5Utils.java
@@ -0,0 +1,566 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Array;
+import java.util.Iterator;
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.rinn.restrictions.Private;
+
+/**
+ * Some utility methods used by {@link HDF5Reader} and {@link HDF5Writer}.
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5Utils
+{
+
+    /**
+     * The name for an explicitly saved string length attribute.
+     */
+    static final String STRING_LENGTH_ATTRIBUTE_NAME = "STRING_LENGTH";
+
+    /**
+     * The name for a type variant attribute.
+     */
+    static final String TYPE_VARIANT_ATTRIBUTE_NAME = "TYPE_VARIANT";
+
+    /** The minimal size of a chunk. */
+    @Private
+    static final int MIN_CHUNK_SIZE = 1;
+
+    /** The minimal size of a data set in order to allow for chunking. */
+    private static final long MIN_TOTAL_SIZE_FOR_CHUNKING = 128L;
+
+    /** The dimensions vector for a scalar data type. */
+    static final long[] SCALAR_DIMENSIONS = new long[]
+        { 1 };
+
+    /** The prefix for opaque data types. */
+    static final String OPAQUE_PREFIX = "Opaque_";
+
+    /** The prefix for enum data types. */
+    static final String ENUM_PREFIX = "Enum_";
+
+    /** The prefix for compound data types. */
+    static final String COMPOUND_PREFIX = "Compound_";
+
+    /**
+     * The suffix for housekeeping files and groups. Setting this attribute overrides the default,
+     * which is: __NAME__.
+     */
+    static final String HOUSEKEEPING_NAME_SUFFIX_ATTRIBUTE_NAME = "__HOUSEKEEPING_SUFFIX__";
+
+    /**
+     * The length of the suffix for housekeeping files and groups.
+     */
+    static final String HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME = "__"
+            + STRING_LENGTH_ATTRIBUTE_NAME + "__" + HOUSEKEEPING_NAME_SUFFIX_ATTRIBUTE_NAME + "__";
+
+    /**
+     * The legacy attribute to signal that a data set is empty (for backward compatibility with
+     * 8.10).
+     */
+    static final String DATASET_IS_EMPTY_LEGACY_ATTRIBUTE = "__EMPTY__";
+
+    /** Returns the path of the boolean data type. */
+    static String getBooleanDataTypePath(String houseKeepingNameSuffix)
+    {
+        return getDataTypeGroup(houseKeepingNameSuffix) + "/" + ENUM_PREFIX + "Boolean";
+    }
+
+    /** Returns the path of the data type specifying a type variant. */
+    static String getTypeVariantDataTypePath(String houseKeepingNameSuffix)
+    {
+        return getDataTypeGroup(houseKeepingNameSuffix) + "/" + ENUM_PREFIX + "TypeVariant";
+    }
+
+    /** Returns the path of the variable-length string data type. */
+    static String getVariableLengthStringDataTypePath(String houseKeepingNameSuffix)
+    {
+        return getDataTypeGroup(houseKeepingNameSuffix) + "/String_VariableLength";
+    }
+
+    /**
+     * Returns the name of the attribute that signals that this compound type has members with
+     * type variants of the member data type.
+     */
+    static String getTypeVariantMembersAttributeName(String houseKeepingNameSuffix)
+    {
+        return "".equals(houseKeepingNameSuffix) ? "__TYPE_VARIANT_MEMBERS__"
+                : "TYPE_VARIANT_MEMBERS" + houseKeepingNameSuffix;
+    }
+
+    /** Returns the name of the attribute that stores the name of the enum data type. */
+    static String getEnumTypeNameAttributeName(String houseKeepingNameSuffix)
+    {
+        return "".equals(houseKeepingNameSuffix) ? "__ENUM_TYPE_NAME__" : "ENUM_TYPE_NAME"
+                + houseKeepingNameSuffix;
+    }
+
+    /** Returns the group to store all named derived data types in. */
+    static String getDataTypeGroup(String houseKeepingNameSuffix)
+    {
+        return "".equals(houseKeepingNameSuffix) ? "/__DATA_TYPES__" : "/DATA_TYPES"
+                + houseKeepingNameSuffix;
+    }
+
+    /**
+     * All integer types in Java.
+     */
+    static Class<?>[] allIntegerTypes = new Class<?>[]
+        { byte.class, Byte.class, short.class, Short.class, int.class, Integer.class, long.class,
+                Long.class };
+
+    /**
+     * All float types in Java.
+     */
+    static Class<?>[] allFloatTypes = new Class<?>[]
+        { float.class, Float.class, double.class, Double.class };
+
+    /**
+     * All types in Java that can store time durations.
+     */
+    static Class<?>[] allTimeDurationTypes = new Class<?>[]
+        { byte.class, Byte.class, short.class, Short.class, int.class, Integer.class, long.class,
+                Long.class, float.class, Float.class, double.class, Double.class,
+                HDF5TimeDuration.class };
+
+    /**
+     * Returns the primitive type for wrapper classes of primitive types, and <var>clazz</var>
+     * itself otherwise.
+     */
+    static Class<?> unwrapClass(Class<?> clazz)
+    {
+        if (clazz == Byte.class)
+        {
+            return byte.class;
+        } else if (clazz == Short.class)
+        {
+            return short.class;
+        } else if (clazz == Integer.class)
+        {
+            return int.class;
+        } else if (clazz == Long.class)
+        {
+            return long.class;
+        } else if (clazz == Float.class)
+        {
+            return float.class;
+        } else if (clazz == Double.class)
+        {
+            return double.class;
+        } else if (clazz == Boolean.class)
+        {
+            return boolean.class;
+        } else
+        {
+            return clazz;
+        }
+    }
+
+    static String getSuperGroup(String path)
+    {
+        assert path != null;
+
+        final int lastIndexSlash = path.lastIndexOf('/');
+        if (lastIndexSlash <= 0)
+        {
+            return "/";
+        } else
+        {
+            return path.substring(0, lastIndexSlash);
+        }
+    }
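+    // Illustrative examples (not part of the upstream source):
+    // getSuperGroup("/some/group/dataset") returns "/some/group", while
+    // getSuperGroup("/dataset") and getSuperGroup("dataset") both return "/".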
+
+    static boolean isEmpty(long[] dimensions)
+    {
+        for (long d : dimensions)
+        {
+            if (d == 0)
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    static boolean isNonPositive(long[] dimensions)
+    {
+        for (long d : dimensions)
+        {
+            if (d <= 0)
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Returns the chunk dimensions for a scalar string data set, or <code>null</code> if this
+     * data set is too small for chunking.
+     */
+    static long[] tryGetChunkSizeForString(int len, boolean tryChunkedDS)
+    {
+        if (tryChunkedDS)
+        {
+            return (len < MIN_TOTAL_SIZE_FOR_CHUNKING) ? null : SCALAR_DIMENSIONS;
+        } else
+        {
+            return null;
+        }
+    }
+
+    /**
+     * Returns a chunk size suitable for a data set with the given <var>dimensions</var>, or
+     * <code>null</code> if this data set cannot reasonably be chunked.
+     */
+    static long[] tryGetChunkSize(final long[] dimensions, int elementLength, boolean tryChunkedDS,
+            boolean enforceChunkedDS)
+    {
+        assert dimensions != null;
+
+        if (enforceChunkedDS == false && tryChunkedDS == false)
+        {
+            return null;
+        }
+        final long[] chunkSize = new long[dimensions.length];
+        long totalSize = elementLength;
+        for (int i = 0; i < dimensions.length; ++i)
+        {
+            totalSize *= dimensions[i];
+            chunkSize[i] = Math.max(MIN_CHUNK_SIZE, dimensions[i]);
+        }
+        if (enforceChunkedDS == false && totalSize < MIN_TOTAL_SIZE_FOR_CHUNKING)
+        {
+            return null;
+        }
+        return chunkSize;
+    }
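+    // Illustrative sketch (not part of the upstream source): for a 10x10 data set of
+    // 4-byte elements, tryGetChunkSize(new long[] { 10, 10 }, 4, true, false) computes
+    // totalSize = 4 * 10 * 10 = 400 >= MIN_TOTAL_SIZE_FOR_CHUNKING and returns
+    // { 10, 10 }, while a 2x2 data set (totalSize = 16) yields null unless
+    // enforceChunkedDS is set.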
+
+    /**
+     * Returns a path for a data type with <var>name</var> and (optional) <var>appendices</var>.
+     * <p>
+     * <b>Special case:</b> If the <var>appendices</var> array contains exactly one element and if
+     * this element starts with '/', this element itself will be considered the (complete) data type
+     * path.
+     */
+    static String createDataTypePath(String name, String houseKeepingSuffix, String... appendices)
+    {
+        if (appendices.length == 1 && appendices[0].startsWith("/"))
+        {
+            return appendices[0];
+        }
+        final StringBuilder builder = new StringBuilder();
+        builder.append(getDataTypeGroup(houseKeepingSuffix));
+        builder.append('/');
+        builder.append(name);
+        for (String app : appendices)
+        {
+            builder.append(app);
+        }
+        return builder.toString();
+    }
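+    // Illustrative examples (not part of the upstream source), assuming the default empty
+    // housekeeping suffix: createDataTypePath("Enum_", "", "Colors") returns
+    // "/__DATA_TYPES__/Enum_Colors", while createDataTypePath("Enum_", "", "/my/path")
+    // returns "/my/path" because the single appendix starts with '/'.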
+
+    /**
+     * Returns the name for a committed data type with path <var>pathOrNull</var>. If
+     * <code>pathOrNull == null</code>, the method will return <code>UNKNOWN</code>.
+     */
+    static String getDataTypeNameFromPath(String pathOrNull, String houseKeepingNameSuffix,
+            HDF5DataClass dataClass)
+    {
+        return (pathOrNull == null) ? "UNKNOWN" : tryGetDataTypeNameFromPath(pathOrNull,
+                houseKeepingNameSuffix, dataClass);
+    }
+
+    /**
+     * Returns the name for a committed data type with path <var>pathOrNull</var>. If
+     * <code>pathOrNull == null</code>, the method will return <code>null</code>.
+     */
+    static String tryGetDataTypeNameFromPath(String pathOrNull, String houseKeepingNameSuffix,
+            HDF5DataClass dataClass)
+    {
+        if (pathOrNull == null)
+        {
+            return null;
+        } else
+        {
+            final String prefix = getPrefixForDataClass(dataClass);
+            final String pathPrefix = createDataTypePath(prefix, houseKeepingNameSuffix);
+            if (pathOrNull.startsWith(pathPrefix))
+            {
+                return pathOrNull.substring(pathPrefix.length());
+            } else
+            {
+                final int lastPathSepIdx = pathOrNull.lastIndexOf('/');
+                if (lastPathSepIdx >= 0)
+                {
+                    return pathOrNull.substring(lastPathSepIdx + 1);
+                } else
+                {
+                    return pathOrNull;
+                }
+            }
+        }
+    }
+
+    /**
+     * Returns a prefix for a given data class, or <code>""</code>, if this data class does not have
+     * a prefix.
+     */
+    static String getPrefixForDataClass(HDF5DataClass dataClass)
+    {
+        switch (dataClass)
+        {
+            case COMPOUND:
+                return COMPOUND_PREFIX;
+            case ENUM:
+                return ENUM_PREFIX;
+            case OPAQUE:
+                return OPAQUE_PREFIX;
+            default:
+                return "";
+        }
+    }
+
+    /**
+     * Returns the length of a one-dimensional array defined by <var>dimensions</var>.
+     * 
+     * @throws HDF5JavaException If <var>dimensions</var> do not define a one-dimensional array.
+     */
+    static int getOneDimensionalArraySize(final int[] dimensions)
+    {
+        assert dimensions != null;
+
+        if (dimensions.length == 0) // Scalar data space needs to be treated differently
+        {
+            return 1;
+        }
+        if (dimensions.length != 1)
+        {
+            throw new HDF5JavaException("Data Set is expected to be of rank 1 (rank="
+                    + dimensions.length + ")");
+        }
+        return dimensions[0];
+    }
+
+    /**
+     * Returns the length of a one-dimensional array defined by <var>dimensions</var>.
+     * 
+     * @throws HDF5JavaException If <var>dimensions</var> do not define a one-dimensional array or
+     *             if <code>dimensions[0]</code> overflows the <code>int</code> type.
+     */
+    static int getOneDimensionalArraySize(final long[] dimensions)
+    {
+        assert dimensions != null;
+
+        if (dimensions.length == 0) // Scalar data space needs to be treated differently
+        {
+            return 1;
+        }
+        if (dimensions.length != 1)
+        {
+            throw new HDF5JavaException("Data Set is expected to be of rank 1 (rank="
+                    + dimensions.length + ")");
+        }
+        final int length = (int) dimensions[0];
+        if (length != dimensions[0])
+        {
+            throw new HDF5JavaException("Length is too large (" + dimensions[0] + ")");
+        }
+        return length;
+    }
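+    // Illustrative examples (not part of the upstream source):
+    // getOneDimensionalArraySize(new long[] { 5 }) returns 5, an empty dimensions array
+    // (a scalar data space) returns 1, and new long[] { 3, 4 } throws an HDF5JavaException.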
+
+    /** Returns the name of the attribute that signals that this is a variant of the data type. */
+    static String createObjectTypeVariantAttributeName(String houseKeepingNameSuffix)
+    {
+        return "".equals(houseKeepingNameSuffix) ? "__" + TYPE_VARIANT_ATTRIBUTE_NAME + "__"
+                : TYPE_VARIANT_ATTRIBUTE_NAME + houseKeepingNameSuffix;
+    }
+
+    /**
+     * Returns the name of the type variant attribute for the given <var>attributeName</var>.
+     */
+    static String createAttributeTypeVariantAttributeName(String attributeName, String suffix)
+    {
+        final boolean noSuffix = "".equals(suffix);
+        return (noSuffix ? "__" : "") + TYPE_VARIANT_ATTRIBUTE_NAME + "__" + attributeName
+                + (noSuffix ? "__" : suffix);
+    }
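+    // Illustrative example (not part of the upstream source): with the default empty
+    // suffix, createAttributeTypeVariantAttributeName("temperature", "") returns
+    // "__TYPE_VARIANT__temperature__".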
+
+    /**
+     * Returns <code>true</code>, if <var>name</var> denotes an internal name used by the library
+     * for house-keeping.
+     */
+    private static boolean isInternalName(final String name)
+    {
+        return name.startsWith("__") && name.endsWith("__");
+    }
+
+    /**
+     * Returns <code>true</code>, if <var>name</var> denotes an internal name used by the library
+     * for house-keeping, given the <var>houseKeepingNameSuffix</var>.
+     */
+    static boolean isInternalName(String name, String houseKeepingNameSuffix)
+    {
+        return "".equals(houseKeepingNameSuffix) ? isInternalName(name) : name
+                .endsWith(houseKeepingNameSuffix);
+    }
+
+    /**
+     * Returns <code>true</code> if the given <var>name</var> is an internal name, treating the
+     * root-level housekeeping attributes as internal as well if <var>filterRootAttributes</var>
+     * is set.
+     */
+    static boolean isInternalName(final String name, final String houseKeepingNameSuffix,
+            final boolean filterRootAttributes)
+    {
+        if (filterRootAttributes)
+        {
+            return isInternalName(name, houseKeepingNameSuffix)
+                    || HOUSEKEEPING_NAME_SUFFIX_ATTRIBUTE_NAME.equals(name)
+                    || HOUSEKEEPING_NAME_SUFFIX_STRINGLENGTH_ATTRIBUTE_NAME.equals(name);
+        } else
+        {
+            return isInternalName(name, houseKeepingNameSuffix);
+        }
+    }
+
+    /**
+     * Creates an internal name from the given <var>name</var>, using the
+     * <var>houseKeepingNameSuffix</var>.
+     */
+    static String toHouseKeepingName(String name, String houseKeepingNameSuffix)
+    {
+        return "".equals(houseKeepingNameSuffix) ? "__" + name + "__" : name
+                + houseKeepingNameSuffix;
+    }
+
+    /**
+     * Creates an internal name from the given <var>objectPath</var>, using the
+     * <var>houseKeepingNameSuffix</var>.
+     */
+    static String toHouseKeepingPath(String objectPath, String houseKeepingNameSuffix)
+    {
+        final int lastPathSeparator = objectPath.lastIndexOf('/') + 1;
+        return lastPathSeparator > 0 ? objectPath.substring(0, lastPathSeparator)
+                + toHouseKeepingName(objectPath.substring(lastPathSeparator),
+                        houseKeepingNameSuffix) : toHouseKeepingName(objectPath,
+                houseKeepingNameSuffix);
+    }
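+    // Illustrative examples (not part of the upstream source): with the default empty
+    // suffix, toHouseKeepingPath("/a/b/version", "") returns "/a/b/__version__" and
+    // toHouseKeepingPath("version", "") returns "__version__".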
+
+    /**
+     * Removes all internal names from the list <var>names</var>.
+     * 
+     * @return The list <var>names</var>.
+     */
+    static List<String> removeInternalNames(final List<String> names,
+            final String houseKeepingNameSuffix, final boolean filterRootAttributes)
+    {
+        for (Iterator<String> iterator = names.iterator(); iterator.hasNext(); /**/)
+        {
+            final String memberName = iterator.next();
+            if (isInternalName(memberName, houseKeepingNameSuffix, filterRootAttributes))
+            {
+                iterator.remove();
+            }
+        }
+        return names;
+    }
+
+    @SuppressWarnings("unchecked")
+    static <T> T[] createArray(final Class<T> componentClass, final int vectorLength)
+    {
+        final T[] value = (T[]) java.lang.reflect.Array.newInstance(componentClass, vectorLength);
+        return value;
+    }
+
+    /**
+     * Returns <code>true</code> if all elements of <var>dimensions</var> are 1. In that case the
+     * data set might be empty in storage, and {@link #DATASET_IS_EMPTY_LEGACY_ATTRIBUTE} needs to
+     * be checked (for backward compatibility with 8.10).
+     */
+    static boolean mightBeEmptyInStorage(final long[] dimensions)
+    {
+        for (long d : dimensions)
+        {
+            if (d != 1L)
+            {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Checks the consistency of the dimensions of a given array.
+     * <p>
+     * As Java doesn't have a matrix data type, but only arrays of arrays, there is no way to ensure
+     * in the language itself that all rows have the same length.
+     * 
+     * @return <code>true</code> if the given matrix is consistent and <code>false</code> otherwise.
+     */
+    static boolean areMatrixDimensionsConsistent(Object a)
+    {
+        if (a.getClass().isArray() == false)
+        {
+            return false;
+        }
+        final int length = Array.getLength(a);
+        if (length == 0)
+        {
+            return true;
+        }
+        final Object element = Array.get(a, 0);
+        if (element.getClass().isArray())
+        {
+            final int elementLength = Array.getLength(element);
+            for (int i = 0; i < length; ++i)
+            {
+                final Object o = Array.get(a, i);
+                if (areMatrixDimensionsConsistent(o) == false)
+                {
+                    return false;
+                }
+                if (elementLength != Array.getLength(o))
+                {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
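+    // Illustrative examples (not part of the upstream source):
+    // areMatrixDimensionsConsistent(new int[][] { { 1, 2 }, { 3, 4 } }) returns true,
+    // while a ragged array like new int[][] { { 1, 2 }, { 3 } } returns false.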
+
+    /**
+     * Checks if <var>subDimensions</var> are in bounds of <var>dimensions</var>.
+     */
+    static boolean isInBounds(long[] dimensions, long[] subDimensions)
+    {
+        assert dimensions.length == subDimensions.length;
+
+        for (int i = 0; i < dimensions.length; ++i)
+        {
+            if (subDimensions[i] > dimensions[i])
+            {
+                return false;
+            }
+        }
+        return true;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5ValueObjectByteifyer.java b/source/java/ch/systemsx/cisd/hdf5/HDF5ValueObjectByteifyer.java
new file mode 100644
index 0000000..ac6a21e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5ValueObjectByteifyer.java
@@ -0,0 +1,363 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * A class that byteifies Java value objects. The fields have to be specified by name. This class
+ * can handle all primitive types and Strings.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5ValueObjectByteifyer<T>
+{
+
+    private final HDF5MemberByteifyer[] byteifyers;
+
+    private final int recordSizeInMemory;
+
+    private final int recordSizeOnDisk;
+
+    private final int[] vlMemberIndices;
+
+    private Class<?> cachedRecordClass;
+
+    private Constructor<?> cachedDefaultConstructor;
+
+    @SuppressWarnings("unchecked")
+    private static <T> T newMap(int size)
+    {
+        return (T) new HDF5CompoundDataMap(size);
+    }
+
+    @SuppressWarnings("unchecked")
+    private static <T> T newList(int size)
+    {
+        return (T) new HDF5CompoundDataList(Collections.nCopies(size, null));
+    }
+
+    @SuppressWarnings("unchecked")
+    private static <T> T newArray(int size)
+    {
+        return (T) new Object[size];
+    }
+
+    /** A role that provides this byteifyer with direct access to the HDF5 file. */
+    interface IFileAccessProvider
+    {
+        public int getBooleanDataTypeId();
+
+        public int getStringDataTypeId(int maxLength);
+
+        public int getVariableLengthStringDataTypeId();
+
+        public int getArrayTypeId(int baseTypeId, int length);
+
+        public int getArrayTypeId(int baseTypeId, int[] dimensions);
+
+        public HDF5EnumerationType getEnumType(String[] options);
+
+        public CharacterEncoding getCharacterEncoding(int dataTypeId);
+        
+        public byte[] createObjectReference(String referencedObjectPath);
+    }
+
+    HDF5ValueObjectByteifyer(Class<T> clazz, IFileAccessProvider fileInfoProvider,
+            CompoundTypeInformation compoundTypeInfoOrNull, HDF5CompoundMemberMapping... members)
+    {
+        byteifyers =
+                HDF5CompoundByteifyerFactory.createMemberByteifyers(clazz, fileInfoProvider,
+                        compoundTypeInfoOrNull, members);
+        int numberOfVLMembers = 0;
+        if (compoundTypeInfoOrNull != null)
+        {
+            recordSizeOnDisk = compoundTypeInfoOrNull.recordSizeOnDisk;
+            recordSizeInMemory = compoundTypeInfoOrNull.recordSizeInMemory;
+            numberOfVLMembers = compoundTypeInfoOrNull.getNumberOfVLMembers();
+        } else if (byteifyers.length > 0)
+        {
+            recordSizeOnDisk = byteifyers[byteifyers.length - 1].getTotalSizeOnDisk();
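+            // Pad the in-memory record size to a multiple of the largest member element
+            // size so that consecutive records of an array remain properly aligned.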
+            recordSizeInMemory =
+                    PaddingUtils.padOffset(
+                            byteifyers[byteifyers.length - 1].getTotalSizeInMemory(),
+                            PaddingUtils.findMaxElementSize(byteifyers));
+            for (HDF5MemberByteifyer byteifyer : byteifyers)
+            {
+                if (byteifyer.isVariableLengthType())
+                {
+                    ++numberOfVLMembers;
+                }
+            }
+        } else
+        {
+            recordSizeOnDisk = 0;
+            recordSizeInMemory = 0;
+        }
+        vlMemberIndices = new int[numberOfVLMembers];
+        int idx = 0;
+        for (HDF5MemberByteifyer byteifyer : byteifyers)
+        {
+            if (byteifyer.isVariableLengthType())
+            {
+                vlMemberIndices[idx++] = byteifyer.getOffsetInMemory();
+            }
+        }
+    }
+
+    public int insertMemberTypes(int dataTypeId)
+    {
+        for (HDF5MemberByteifyer byteifyer : byteifyers)
+        {
+            byteifyer.insertType(dataTypeId);
+        }
+        return dataTypeId;
+    }
+
+    public int insertNativeMemberTypes(int dataTypeId, HDF5 h5, ICleanUpRegistry registry)
+    {
+        for (HDF5MemberByteifyer byteifyer : byteifyers)
+        {
+            byteifyer.insertNativeType(dataTypeId, h5, registry);
+        }
+        return dataTypeId;
+    }
+
+    /**
+     * @throws HDF5JavaException If one of the elements in <var>arr</var> exceeds its pre-defined
+     *             size.
+     */
+    public byte[] byteify(int compoundDataTypeId, T[] arr) throws HDF5JavaException
+    {
+        final byte[] barray = new byte[arr.length * recordSizeInMemory];
+        int offset = 0;
+        int counter = 0;
+        for (Object obj : arr)
+        {
+            for (HDF5MemberByteifyer byteifyer : byteifyers)
+            {
+                try
+                {
+                    final byte[] b = byteifyer.byteify(compoundDataTypeId, obj);
+                    if (b.length > byteifyer.getSize() && byteifyer.mayBeCut() == false)
+                    {
+                        throw new HDF5JavaException("Compound " + byteifyer.describe()
+                                + " of array element " + counter + " must not exceed "
+                                + byteifyer.getSize() + " bytes, but is of size " + b.length
+                                + " bytes.");
+                    }
+                    System.arraycopy(b, 0, barray, offset + byteifyer.getOffsetInMemory(),
+                            Math.min(b.length, byteifyer.getSize()));
+                } catch (IllegalAccessException ex)
+                {
+                    throw new HDF5JavaException("Error accessing " + byteifyer.describe());
+                }
+            }
+            offset += recordSizeInMemory;
+            ++counter;
+        }
+        return barray;
+    }
+
+    /**
+     * @throws HDF5JavaException If <var>obj</var> exceeds its pre-defined size.
+     */
+    public byte[] byteify(int compoundDataTypeId, T obj) throws HDF5JavaException
+    {
+        final byte[] barray = new byte[recordSizeInMemory];
+        for (HDF5MemberByteifyer byteifyer : byteifyers)
+        {
+            try
+            {
+                final byte[] b = byteifyer.byteify(compoundDataTypeId, obj);
+                if (b.length > byteifyer.getSize() && byteifyer.mayBeCut() == false)
+                {
+                    throw new HDF5JavaException("Compound " + byteifyer.describe()
+                            + " must not exceed " + byteifyer.getSize() + " bytes, but is of size "
+                            + b.length + " bytes.");
+                }
+                System.arraycopy(b, 0, barray, byteifyer.getOffsetInMemory(),
+                        Math.min(b.length, byteifyer.getSize()));
+            } catch (IllegalAccessException ex)
+            {
+                throw new HDF5JavaException("Error accessing " + byteifyer.describe());
+            }
+        }
+        return barray;
+    }
+
+    public T[] arrayify(int compoundDataTypeId, byte[] byteArr, Class<T> recordClass)
+    {
+        final int length = byteArr.length / recordSizeInMemory;
+        if (length * recordSizeInMemory != byteArr.length)
+        {
+            throw new HDF5JavaException("Illegal byte array for compound type (length "
+                    + byteArr.length + " is not a multiple of record size " + recordSizeInMemory
+                    + ")");
+        }
+        final T[] result = HDF5Utils.createArray(recordClass, length);
+        int offset = 0;
+        for (int i = 0; i < length; ++i)
+        {
+            result[i] = primArrayifyScalar(compoundDataTypeId, byteArr, recordClass, offset);
+            offset += recordSizeInMemory;
+        }
+        return result;
+    }
+
+    public T arrayifyScalar(int compoundDataTypeId, byte[] byteArr, Class<T> recordClass)
+    {
+        if (byteArr.length < recordSizeInMemory)
+        {
+            throw new HDF5JavaException("Illegal byte array for scalar compound type (length "
+                    + byteArr.length + " is smaller than record size " + recordSizeInMemory + ")");
+        }
+        return primArrayifyScalar(compoundDataTypeId, byteArr, recordClass, 0);
+    }
+
+    private T primArrayifyScalar(int compoundDataTypeId, byte[] byteArr, Class<T> recordClass,
+            int offset)
+    {
+        T result = newInstance(recordClass);
+        for (HDF5MemberByteifyer byteifyer : byteifyers)
+        {
+            try
+            {
+                byteifyer.setFromByteArray(compoundDataTypeId, result, byteArr, offset);
+            } catch (IllegalAccessException ex)
+            {
+                throw new HDF5JavaException("Error accessing " + byteifyer.describe());
+            }
+        }
+        return result;
+    }
+
+    @SuppressWarnings("unchecked")
+    private T newInstance(Class<?> recordClass) throws HDF5JavaException
+    {
+        if (Map.class.isAssignableFrom(recordClass))
+        {
+            return newMap(byteifyers.length);
+        }
+        if (List.class.isAssignableFrom(recordClass))
+        {
+            return newList(byteifyers.length);
+        }
+        if (recordClass == Object[].class)
+        {
+            return newArray(byteifyers.length);
+        }
+        try
+        {
+            if (recordClass != cachedRecordClass)
+            {
+                cachedRecordClass = recordClass;
+                cachedDefaultConstructor = ReflectionUtils.getDefaultConstructor(recordClass);
+            }
+            return (T) cachedDefaultConstructor.newInstance();
+        } catch (Exception ex)
+        {
+            throw new HDF5JavaException("Creation of new object of class "
+                    + recordClass.getCanonicalName() + " by default constructor failed: "
+                    + ex.toString());
+        }
+    }
+
+    public int getRecordSizeOnDisk()
+    {
+        return recordSizeOnDisk;
+    }
+
+    public int getRecordSizeInMemory()
+    {
+        return recordSizeInMemory;
+    }
+
+    public HDF5MemberByteifyer[] getByteifyers()
+    {
+        return byteifyers;
+    }
+
+    /**
+     * Returns <code>true</code> if the value object byteifyer has any members that cannot be mapped
+     * to the in-memory representation.
+     */
+    public boolean hasUnmappedMembers()
+    {
+        for (HDF5MemberByteifyer memberByteifyer : byteifyers)
+        {
+            if (memberByteifyer.isDummy())
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Returns an array with the names of all members that cannot be mapped to the in-memory
+     * representation.
+     */
+    public String[] getUnmappedMembers()
+    {
+        if (hasUnmappedMembers())
+        {
+            final List<String> unmappedMembers = new ArrayList<String>();
+            for (HDF5MemberByteifyer memberByteifyer : byteifyers)
+            {
+                if (memberByteifyer.isDummy())
+                {
+                    unmappedMembers.add(memberByteifyer.getMemberName());
+                }
+            }
+            return unmappedMembers.toArray(new String[unmappedMembers.size()]);
+        } else
+        {
+            return new String[0];
+        }
+    }
+
+    boolean hasVLMembers()
+    {
+        return vlMemberIndices.length > 0;
+    }
+
+    int[] getVLMemberIndices()
+    {
+        return vlMemberIndices;
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public String toString()
+    {
+        return "HDF5ValueObjectByteifyer [byteifyers=" + Arrays.toString(byteifyers) + "]";
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5Writer.java b/source/java/ch/systemsx/cisd/hdf5/HDF5Writer.java
new file mode 100644
index 0000000..574e842
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5Writer.java
@@ -0,0 +1,3110 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.Flushable;
+import java.util.BitSet;
+import java.util.Date;
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SymbolTableException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.IHDF5CompoundInformationRetriever.IByteArrayInspector;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * A class for writing HDF5 files (HDF5 1.6.x or HDF5 1.8.x).
+ * <p>
+ * The class focuses on ease of use instead of completeness. As a consequence not all valid HDF5
+ * files can be generated using this class, but only a subset.
+ * <p>
+ * Usage:
+ * 
+ * <pre>
+ * float[] f = new float[100];
+ * ...
+ * HDF5Writer writer = new HDF5WriterConfig("test.h5").writer();
+ * writer.writeFloatArray("/some/path/dataset", f);
+ * writer.addAttribute("some key", "some value");
+ * writer.close();
+ * </pre>
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5Writer extends HDF5Reader implements IHDF5Writer
+{
+    private final HDF5BaseWriter baseWriter;
+
+    private final IHDF5FileLevelReadWriteHandler fileHandler;
+
+    private final IHDF5ObjectReadWriteInfoProviderHandler objectHandler;
+
+    private final IHDF5ByteWriter byteWriter;
+
+    private final IHDF5ByteWriter ubyteWriter;
+
+    private final IHDF5ShortWriter shortWriter;
+
+    private final IHDF5ShortWriter ushortWriter;
+
+    private final IHDF5IntWriter intWriter;
+
+    private final IHDF5IntWriter uintWriter;
+
+    private final IHDF5LongWriter longWriter;
+
+    private final IHDF5LongWriter ulongWriter;
+
+    private final IHDF5FloatWriter floatWriter;
+
+    private final IHDF5DoubleWriter doubleWriter;
+
+    private final IHDF5BooleanWriter booleanWriter;
+
+    private final IHDF5StringWriter stringWriter;
+
+    private final IHDF5EnumWriter enumWriter;
+
+    private final IHDF5CompoundWriter compoundWriter;
+
+    private final IHDF5DateTimeWriter dateTimeWriter;
+
+    private final HDF5TimeDurationWriter timeDurationWriter;
+
+    private final IHDF5ReferenceWriter referenceWriter;
+
+    private final IHDF5OpaqueWriter opaqueWriter;
+
+    HDF5Writer(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        this.baseWriter = baseWriter;
+        this.fileHandler = new HDF5FileLevelReadWriteHandler(baseWriter);
+        this.objectHandler = new HDF5ObjectReadWriteInfoProviderHandler(baseWriter);
+        this.byteWriter = new HDF5ByteWriter(baseWriter);
+        this.ubyteWriter = new HDF5UnsignedByteWriter(baseWriter);
+        this.shortWriter = new HDF5ShortWriter(baseWriter);
+        this.ushortWriter = new HDF5UnsignedShortWriter(baseWriter);
+        this.intWriter = new HDF5IntWriter(baseWriter);
+        this.uintWriter = new HDF5UnsignedIntWriter(baseWriter);
+        this.longWriter = new HDF5LongWriter(baseWriter);
+        this.ulongWriter = new HDF5UnsignedLongWriter(baseWriter);
+        this.floatWriter = new HDF5FloatWriter(baseWriter);
+        this.doubleWriter = new HDF5DoubleWriter(baseWriter);
+        this.booleanWriter = new HDF5BooleanWriter(baseWriter);
+        this.stringWriter = new HDF5StringWriter(baseWriter);
+        this.enumWriter = new HDF5EnumWriter(baseWriter);
+        this.compoundWriter = new HDF5CompoundWriter(baseWriter, enumWriter);
+        this.dateTimeWriter = new HDF5DateTimeWriter(baseWriter, (HDF5LongReader) longReader);
+        this.timeDurationWriter =
+                new HDF5TimeDurationWriter(baseWriter, (HDF5LongReader) longReader);
+        this.referenceWriter = new HDF5ReferenceWriter(baseWriter);
+        this.opaqueWriter = new HDF5OpaqueWriter(baseWriter);
+    }
+
+    HDF5BaseWriter getBaseWriter()
+    {
+        return baseWriter;
+    }
+
+    // /////////////////////
+    // File
+    // /////////////////////
+
+    @Override
+    public IHDF5FileLevelReadWriteHandler file()
+    {
+        return fileHandler;
+    }
+
+    @Override
+    public boolean isUseExtendableDataTypes()
+    {
+        return baseWriter.useExtentableDataTypes;
+    }
+
+    @Override
+    public FileFormat getFileFormat()
+    {
+        return baseWriter.fileFormat;
+    }
+
+    @Override
+    public void flush()
+    {
+        baseWriter.checkOpen();
+        baseWriter.flush();
+    }
+
+    @Override
+    public void flushSyncBlocking()
+    {
+        baseWriter.checkOpen();
+        baseWriter.flushSyncBlocking();
+    }
+
+    @Override
+    public boolean addFlushable(Flushable flushable)
+    {
+        return baseWriter.addFlushable(flushable);
+    }
+
+    @Override
+    public boolean removeFlushable(Flushable flushable)
+    {
+        return baseWriter.removeFlushable(flushable);
+    }
+
+    // /////////////////////////////////
+    // Objects, links, groups and types
+    // /////////////////////////////////
+
+    @Override
+    public IHDF5ObjectReadWriteInfoProviderHandler object()
+    {
+        return objectHandler;
+    }
+
+    @Override
+    public HDF5LinkInformation getLinkInformation(String objectPath)
+    {
+        return objectHandler.getLinkInformation(objectPath);
+    }
+
+    @Override
+    public void createHardLink(String currentPath, String newPath)
+    {
+        objectHandler.createHardLink(currentPath, newPath);
+    }
+
+    @Override
+    public HDF5ObjectInformation getObjectInformation(String objectPath)
+    {
+        return objectHandler.getObjectInformation(objectPath);
+    }
+
+    @Override
+    public void createSoftLink(String targetPath, String linkPath)
+    {
+        objectHandler.createSoftLink(targetPath, linkPath);
+    }
+
+    @Override
+    public void createOrUpdateSoftLink(String targetPath, String linkPath)
+    {
+        objectHandler.createOrUpdateSoftLink(targetPath, linkPath);
+    }
+
+    @Override
+    public HDF5ObjectType getObjectType(String objectPath, boolean followLink)
+    {
+        return objectHandler.getObjectType(objectPath, followLink);
+    }
+
+    @Override
+    public void createExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException
+    {
+        objectHandler.createExternalLink(targetFileName, targetPath, linkPath);
+    }
+
+    @Override
+    public HDF5ObjectType getObjectType(String objectPath)
+    {
+        return objectHandler.getObjectType(objectPath);
+    }
+
+    @Override
+    public boolean exists(String objectPath, boolean followLink)
+    {
+        return objectHandler.exists(objectPath, followLink);
+    }
+
+    @Override
+    public boolean exists(String objectPath)
+    {
+        return objectHandler.exists(objectPath);
+    }
+
+    @Override
+    public void createOrUpdateExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException
+    {
+        objectHandler.createOrUpdateExternalLink(targetFileName, targetPath, linkPath);
+    }
+
+    @Override
+    public String toHouseKeepingPath(String objectPath)
+    {
+        return objectHandler.toHouseKeepingPath(objectPath);
+    }
+
+    @Override
+    public boolean isHouseKeepingObject(String objectPath)
+    {
+        return objectHandler.isHouseKeepingObject(objectPath);
+    }
+
+    @Override
+    public boolean isGroup(String objectPath, boolean followLink)
+    {
+        return objectHandler.isGroup(objectPath, followLink);
+    }
+
+    @Override
+    public boolean isGroup(String objectPath)
+    {
+        return objectHandler.isGroup(objectPath);
+    }
+
+    @Override
+    public void delete(String objectPath)
+    {
+        objectHandler.delete(objectPath);
+    }
+
+    @Override
+    public void move(String oldLinkPath, String newLinkPath) throws HDF5SymbolTableException
+    {
+        objectHandler.move(oldLinkPath, newLinkPath);
+    }
+
+    @Override
+    public boolean isDataSet(String objectPath, boolean followLink)
+    {
+        return objectHandler.isDataSet(objectPath, followLink);
+    }
+
+    @Override
+    public void createGroup(String groupPath)
+    {
+        objectHandler.createGroup(groupPath);
+    }
+
+    @Override
+    public boolean isDataSet(String objectPath)
+    {
+        return objectHandler.isDataSet(objectPath);
+    }
+
+    @Override
+    public void createGroup(String groupPath, int sizeHint)
+    {
+        objectHandler.createGroup(groupPath, sizeHint);
+    }
+
+    @Override
+    public boolean isDataType(String objectPath, boolean followLink)
+    {
+        return objectHandler.isDataType(objectPath, followLink);
+    }
+
+    @Override
+    public boolean isDataType(String objectPath)
+    {
+        return objectHandler.isDataType(objectPath);
+    }
+
+    @Override
+    public void createGroup(String groupPath, int maxCompact, int minDense)
+    {
+        objectHandler.createGroup(groupPath, maxCompact, minDense);
+    }
+
+    @Override
+    public boolean isSoftLink(String objectPath)
+    {
+        return objectHandler.isSoftLink(objectPath);
+    }
+
+    @Override
+    public boolean isExternalLink(String objectPath)
+    {
+        return objectHandler.isExternalLink(objectPath);
+    }
+
+    @Override
+    public boolean isSymbolicLink(String objectPath)
+    {
+        return objectHandler.isSymbolicLink(objectPath);
+    }
+
+    @Override
+    public String tryGetSymbolicLinkTarget(String objectPath)
+    {
+        return objectHandler.tryGetSymbolicLinkTarget(objectPath);
+    }
+
+    @Override
+    public void setDataSetSize(String objectPath, long newSize)
+    {
+        objectHandler.setDataSetSize(objectPath, newSize);
+    }
+
+    @Override
+    public boolean hasAttribute(String objectPath, String attributeName)
+    {
+        return objectHandler.hasAttribute(objectPath, attributeName);
+    }
+
+    @Override
+    public void setDataSetDimensions(String objectPath, long[] newDimensions)
+    {
+        objectHandler.setDataSetDimensions(objectPath, newDimensions);
+    }
+
+    @Override
+    public List<String> getAttributeNames(String objectPath)
+    {
+        return objectHandler.getAttributeNames(objectPath);
+    }
+
+    @Override
+    public void setTypeVariant(String objectPath, HDF5DataTypeVariant typeVariant)
+    {
+        objectHandler.setTypeVariant(objectPath, typeVariant);
+    }
+
+    @Override
+    public List<String> getAllAttributeNames(String objectPath)
+    {
+        return objectHandler.getAllAttributeNames(objectPath);
+    }
+
+    @Override
+    public void setTypeVariant(String objectPath, String attributeName,
+            HDF5DataTypeVariant typeVariant)
+    {
+        objectHandler.setTypeVariant(objectPath, attributeName, typeVariant);
+    }
+
+    @Override
+    public HDF5DataTypeInformation getAttributeInformation(String objectPath, String attributeName)
+    {
+        return objectHandler.getAttributeInformation(objectPath, attributeName);
+    }
+
+    @Override
+    public void deleteTypeVariant(String objectPath)
+    {
+        objectHandler.deleteTypeVariant(objectPath);
+    }
+
+    @Override
+    public void deleteTypeVariant(String objectPath, String attributeName)
+    {
+        objectHandler.deleteTypeVariant(objectPath, attributeName);
+    }
+
+    @Override
+    public HDF5DataTypeInformation getAttributeInformation(String objectPath, String attributeName,
+            DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return objectHandler
+                .getAttributeInformation(objectPath, attributeName, dataTypeInfoOptions);
+    }
+
+    @Override
+    public void deleteAttribute(String objectPath, String name)
+    {
+        objectHandler.deleteAttribute(objectPath, name);
+    }
+
+    @Override
+    public HDF5DataSetInformation getDataSetInformation(String dataSetPath)
+    {
+        return objectHandler.getDataSetInformation(dataSetPath);
+    }
+
+    @Override
+    public HDF5DataSetInformation getDataSetInformation(String dataSetPath,
+            DataTypeInfoOptions dataTypeInfoOptions)
+    {
+        return objectHandler.getDataSetInformation(dataSetPath, dataTypeInfoOptions);
+    }
+
+    @Override
+    public long getSize(String objectPath)
+    {
+        return objectHandler.getSize(objectPath);
+    }
+
+    @Override
+    public long getNumberOfElements(String objectPath)
+    {
+        return objectHandler.getNumberOfElements(objectPath);
+    }
+
+    @Override
+    public void copy(String sourceObject, IHDF5Writer destinationWriter, String destinationObject)
+    {
+        objectHandler.copy(sourceObject, destinationWriter, destinationObject);
+    }
+
+    @Override
+    public void copy(String sourceObject, IHDF5Writer destinationWriter)
+    {
+        objectHandler.copy(sourceObject, destinationWriter);
+    }
+
+    @Override
+    public void copyAll(IHDF5Writer destinationWriter)
+    {
+        objectHandler.copyAll(destinationWriter);
+    }
+
+    @Override
+    public List<String> getGroupMembers(String groupPath)
+    {
+        return objectHandler.getGroupMembers(groupPath);
+    }
+
+    @Override
+    public List<String> getAllGroupMembers(String groupPath)
+    {
+        return objectHandler.getAllGroupMembers(groupPath);
+    }
+
+    @Override
+    public List<String> getGroupMemberPaths(String groupPath)
+    {
+        return objectHandler.getGroupMemberPaths(groupPath);
+    }
+
+    @Override
+    public List<HDF5LinkInformation> getGroupMemberInformation(String groupPath,
+            boolean readLinkTargets)
+    {
+        return objectHandler.getGroupMemberInformation(groupPath, readLinkTargets);
+    }
+
+    @Override
+    public List<HDF5LinkInformation> getAllGroupMemberInformation(String groupPath,
+            boolean readLinkTargets)
+    {
+        return objectHandler.getAllGroupMemberInformation(groupPath, readLinkTargets);
+    }
+
+    @Override
+    public HDF5DataTypeVariant tryGetTypeVariant(String objectPath)
+    {
+        return objectHandler.tryGetTypeVariant(objectPath);
+    }
+
+    @Override
+    public HDF5DataTypeVariant tryGetTypeVariant(String objectPath, String attributeName)
+    {
+        return objectHandler.tryGetTypeVariant(objectPath, attributeName);
+    }
+
+    @Override
+    public String tryGetDataTypePath(String objectPath)
+    {
+        return objectHandler.tryGetDataTypePath(objectPath);
+    }
+
+    @Override
+    public String tryGetDataTypePath(HDF5DataType type)
+    {
+        return objectHandler.tryGetDataTypePath(type);
+    }
+
+    @Override
+    public void setBooleanAttribute(String objectPath, String name, boolean value)
+    {
+        booleanWriter.setAttr(objectPath, name, value);
+    }
+
+    // /////////////////////////////
+    // Data Set Reading and Writing
+    // /////////////////////////////
+
+    //
+    // Boolean
+    //
+
+    @Override
+    public IHDF5BooleanWriter bool()
+    {
+        return booleanWriter;
+    }
+
+    @Override
+    public void writeBitField(String objectPath, BitSet data, HDF5GenericStorageFeatures features)
+    {
+        booleanWriter.writeBitField(objectPath, data, features);
+    }
+
+    @Override
+    public void writeBitField(String objectPath, BitSet data)
+    {
+        booleanWriter.writeBitField(objectPath, data);
+    }
+
+    @Override
+    public void writeBoolean(String objectPath, boolean value)
+    {
+        booleanWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void createBitField(String objectPath, int size)
+    {
+        booleanWriter.createBitField(objectPath, size);
+    }
+
+    @Override
+    public void createBitField(String objectPath, long size, int blockSize)
+    {
+        booleanWriter.createBitField(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createBitField(String objectPath, int size, HDF5IntStorageFeatures features)
+    {
+        booleanWriter.createBitField(objectPath, size, HDF5GenericStorageFeatures.build(features)
+                .features());
+    }
+
+    @Override
+    public void createBitField(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features)
+    {
+        booleanWriter.createBitField(objectPath, size, blockSize,
+                HDF5GenericStorageFeatures.build(features).features());
+    }
+
+    @Override
+    public void writeBitFieldBlock(String objectPath, BitSet data, int dataSize, long blockNumber)
+    {
+        booleanWriter.writeBitFieldBlock(objectPath, data, dataSize, blockNumber);
+    }
+
+    @Override
+    public void writeBitFieldBlockWithOffset(String objectPath, BitSet data, int dataSize,
+            long offset)
+    {
+        booleanWriter.writeBitFieldBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    //
+    // Opaque
+    //
+
+    @Override
+    public IHDF5OpaqueWriter opaque()
+    {
+        return opaqueWriter;
+    }
+
+    @Override
+    public HDF5OpaqueType createOpaqueByteArray(String objectPath, String tag, int size,
+            HDF5GenericStorageFeatures features)
+    {
+        return opaqueWriter.createArray(objectPath, tag, size, features);
+    }
+
+    @Override
+    public HDF5OpaqueType createOpaqueByteArray(String objectPath, String tag, int size)
+    {
+        return opaqueWriter.createArray(objectPath, tag, size);
+    }
+
+    @Override
+    public HDF5OpaqueType createOpaqueByteArray(String objectPath, String tag, long size,
+            int blockSize, HDF5GenericStorageFeatures features)
+    {
+        return opaqueWriter.createArray(objectPath, tag, size, blockSize, features);
+    }
+
+    @Override
+    public HDF5OpaqueType createOpaqueByteArray(String objectPath, String tag, long size,
+            int blockSize)
+    {
+        return opaqueWriter.createArray(objectPath, tag, size, blockSize);
+    }
+
+    @Override
+    public void writeOpaqueByteArray(String objectPath, String tag, byte[] data,
+            HDF5GenericStorageFeatures features)
+    {
+        opaqueWriter.writeArray(objectPath, tag, data, features);
+    }
+
+    @Override
+    public void writeOpaqueByteArray(String objectPath, String tag, byte[] data)
+    {
+        opaqueWriter.writeArray(objectPath, tag, data);
+    }
+
+    @Override
+    public void writeOpaqueByteArrayBlock(String objectPath, HDF5OpaqueType dataType, byte[] data,
+            long blockNumber)
+    {
+        opaqueWriter.writeArrayBlock(objectPath, dataType, data, blockNumber);
+    }
+
+    @Override
+    public void writeOpaqueByteArrayBlockWithOffset(String objectPath, HDF5OpaqueType dataType,
+            byte[] data, int dataSize, long offset)
+    {
+        opaqueWriter.writeArrayBlockWithOffset(objectPath, dataType, data, dataSize, offset);
+    }
+
+    //
+    // Date
+    //
+
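+    // Usage sketch (illustrative paths; time stamps are plain longs, as in
+    // writeTimeStamp() below):
+    //
+    //     writer.writeDate("/created", new Date());
+    //     writer.writeTimeStamp("/modified", System.currentTimeMillis());
+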
+    @Override
+    public IHDF5DateTimeWriter time()
+    {
+        return dateTimeWriter;
+    }
+
+    @Override
+    public IHDF5TimeDurationWriter duration()
+    {
+        return timeDurationWriter;
+    }
+
+    @Override
+    public void createTimeStampArray(String objectPath, int size,
+            HDF5GenericStorageFeatures features)
+    {
+        dateTimeWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void setTimeStampAttribute(String objectPath, String name, long value)
+    {
+        dateTimeWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setDateAttribute(String objectPath, String name, Date date)
+    {
+        dateTimeWriter.setAttr(objectPath, name, date);
+    }
+
+    @Override
+    public void setTimeDurationAttribute(String objectPath, String name,
+            HDF5TimeDuration timeDuration)
+    {
+        timeDurationWriter.setAttr(objectPath, name, timeDuration);
+    }
+
+    @Override
+    public void setTimeDurationAttribute(String objectPath, String name, long timeDuration,
+            HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.setAttr(objectPath, name, timeDuration, timeUnit);
+    }
+
+    @Override
+    public void setDateArrayAttribute(String objectPath, String name, Date[] dates)
+    {
+        dateTimeWriter.setArrayAttr(objectPath, name, dates);
+    }
+
+    @Override
+    public void setTimeStampArrayAttribute(String objectPath, String name, long[] timeStamps)
+    {
+        dateTimeWriter.setArrayAttr(objectPath, name, timeStamps);
+    }
+
+    @Override
+    public void setTimeDurationArrayAttribute(String objectPath, String name,
+            HDF5TimeDurationArray timeDurations)
+    {
+        timeDurationWriter.setArrayAttr(objectPath, name, timeDurations);
+    }
+
+    @Override
+    public void createTimeStampArray(String objectPath, int size)
+    {
+        dateTimeWriter.createArray(objectPath, size);
+    }
+
+    @Override
+    public void createTimeStampArray(String objectPath, long size, int blockSize,
+            HDF5GenericStorageFeatures features)
+    {
+        dateTimeWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createTimeStampArray(String objectPath, long size, int blockSize)
+    {
+        dateTimeWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void writeDate(String objectPath, Date date)
+    {
+        dateTimeWriter.write(objectPath, date);
+    }
+
+    @Override
+    public void writeDateArray(String objectPath, Date[] dates, HDF5GenericStorageFeatures features)
+    {
+        dateTimeWriter.writeArray(objectPath, dates, features);
+    }
+
+    @Override
+    public void writeDateArray(String objectPath, Date[] dates)
+    {
+        dateTimeWriter.writeArray(objectPath, dates);
+    }
+
+    @Override
+    public void writeTimeStamp(String objectPath, long timeStamp)
+    {
+        dateTimeWriter.write(objectPath, timeStamp);
+    }
+
+    @Override
+    public void writeTimeStampArray(String objectPath, long[] timeStamps,
+            HDF5GenericStorageFeatures features)
+    {
+        dateTimeWriter.writeArray(objectPath, timeStamps, features);
+    }
+
+    @Override
+    public void writeTimeStampArray(String objectPath, long[] timeStamps)
+    {
+        dateTimeWriter.writeArray(objectPath, timeStamps);
+    }
+
+    @Override
+    public void writeTimeStampArrayBlock(String objectPath, long[] data, long blockNumber)
+    {
+        dateTimeWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeTimeStampArrayBlockWithOffset(String objectPath, long[] data, int dataSize,
+            long offset)
+    {
+        dateTimeWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    //
+    // Duration
+    //
+
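+    // Usage sketch (illustrative path; assumes HDF5TimeUnit.SECONDS is a valid
+    // unit constant in this API):
+    //
+    //     writer.writeTimeDuration("/elapsed", 42L, HDF5TimeUnit.SECONDS);
+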
+    @Override
+    public void createTimeDurationArray(String objectPath, int size, HDF5TimeUnit timeUnit,
+            HDF5GenericStorageFeatures features)
+    {
+        timeDurationWriter.createArray(objectPath, size, timeUnit, features);
+    }
+
+    @Override
+    public void createTimeDurationArray(String objectPath, int size, HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.createArray(objectPath, size, timeUnit);
+    }
+
+    @Override
+    public void createTimeDurationArray(String objectPath, long size, int blockSize,
+            HDF5TimeUnit timeUnit, HDF5GenericStorageFeatures features)
+    {
+        timeDurationWriter.createArray(objectPath, size, blockSize, timeUnit, features);
+    }
+
+    @Override
+    public void createTimeDurationArray(String objectPath, long size, int blockSize,
+            HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.createArray(objectPath, size, blockSize, timeUnit);
+    }
+
+    @Override
+    public void writeTimeDuration(String objectPath, long timeDuration, HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.write(objectPath, timeDuration, timeUnit);
+    }
+
+    @Override
+    public void writeTimeDuration(String objectPath, HDF5TimeDuration timeDuration)
+    {
+        timeDurationWriter.write(objectPath, timeDuration);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDuration(String objectPath, long timeDuration)
+    {
+        timeDurationWriter.writeTimeDuration(objectPath, timeDuration);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArray(String objectPath, long[] timeDurations,
+            HDF5TimeUnit timeUnit, HDF5IntStorageFeatures features)
+    {
+        timeDurationWriter.writeTimeDurationArray(objectPath, timeDurations, timeUnit, features);
+    }
+
+    @Override
+    public void writeTimeDurationArray(String objectPath, HDF5TimeDurationArray timeDurations)
+    {
+        timeDurationWriter.writeArray(objectPath, timeDurations);
+    }
+
+    @Override
+    public void writeTimeDurationArray(String objectPath, HDF5TimeDurationArray timeDurations,
+            HDF5IntStorageFeatures features)
+    {
+        timeDurationWriter.writeArray(objectPath, timeDurations, features);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArray(String objectPath, long[] timeDurations,
+            HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.writeTimeDurationArray(objectPath, timeDurations, timeUnit);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArray(String objectPath, long[] timeDurations)
+    {
+        timeDurationWriter.writeTimeDurationArray(objectPath, timeDurations);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArray(String objectPath, HDF5TimeDuration[] timeDurations)
+    {
+        timeDurationWriter.writeTimeDurationArray(objectPath, timeDurations);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArray(String objectPath, HDF5TimeDuration[] timeDurations,
+            HDF5IntStorageFeatures features)
+    {
+        timeDurationWriter.writeTimeDurationArray(objectPath, timeDurations, features);
+    }
+
+    @Override
+    public void writeTimeDurationArrayBlock(String objectPath, HDF5TimeDurationArray data,
+            long blockNumber)
+    {
+        timeDurationWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeTimeDurationArrayBlockWithOffset(String objectPath,
+            HDF5TimeDurationArray data, int dataSize, long offset)
+    {
+        timeDurationWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArrayBlock(String objectPath, long[] data, long blockNumber,
+            HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.writeTimeDurationArrayBlock(objectPath, data, blockNumber, timeUnit);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArrayBlockWithOffset(String objectPath, long[] data, int dataSize,
+            long offset, HDF5TimeUnit timeUnit)
+    {
+        timeDurationWriter.writeTimeDurationArrayBlockWithOffset(objectPath, data, dataSize,
+                offset, timeUnit);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArrayBlock(String objectPath, HDF5TimeDuration[] data,
+            long blockNumber)
+    {
+        timeDurationWriter.writeTimeDurationArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    @Deprecated
+    public void writeTimeDurationArrayBlockWithOffset(String objectPath, HDF5TimeDuration[] data,
+            int dataSize, long offset)
+    {
+        timeDurationWriter
+                .writeTimeDurationArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    //
+    // References
+    //
+
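+    // Usage sketch (illustrative paths; the reference points at an existing object):
+    //
+    //     writer.writeObjectReference("/ref", "/some/dataset");
+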
+    @Override
+    public IHDF5ReferenceWriter reference()
+    {
+        return referenceWriter;
+    }
+
+    @Override
+    public void writeObjectReference(String objectPath, String referencedObjectPath)
+    {
+        referenceWriter.write(objectPath, referencedObjectPath);
+    }
+
+    @Override
+    public void writeObjectReferenceArray(String objectPath, String[] referencedObjectPath)
+    {
+        referenceWriter.writeArray(objectPath, referencedObjectPath);
+    }
+
+    @Override
+    public void writeObjectReferenceArray(String objectPath, String[] referencedObjectPath,
+            HDF5IntStorageFeatures features)
+    {
+        referenceWriter.writeArray(objectPath, referencedObjectPath, features);
+    }
+
+    @Override
+    public void writeObjectReferenceMDArray(String objectPath, MDArray<String> referencedObjectPaths)
+    {
+        referenceWriter.writeMDArray(objectPath, referencedObjectPaths);
+    }
+
+    @Override
+    public void writeObjectReferenceMDArray(String objectPath,
+            MDArray<String> referencedObjectPaths, HDF5IntStorageFeatures features)
+    {
+        referenceWriter.writeMDArray(objectPath, referencedObjectPaths, features);
+    }
+
+    @Override
+    public void setObjectReferenceAttribute(String objectPath, String name,
+            String referencedObjectPath)
+    {
+        referenceWriter.setAttr(objectPath, name, referencedObjectPath);
+    }
+
+    @Override
+    public void setObjectReferenceArrayAttribute(String objectPath, String name, String[] value)
+    {
+        referenceWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setObjectReferenceMDArrayAttribute(String objectPath, String name,
+            MDArray<String> referencedObjectPaths)
+    {
+        referenceWriter.setMDArrayAttr(objectPath, name, referencedObjectPaths);
+    }
+
+    @Override
+    public void createObjectReferenceArray(String objectPath, int size)
+    {
+        referenceWriter.createArray(objectPath, size);
+    }
+
+    @Override
+    public void createObjectReferenceArray(String objectPath, long size, int blockSize)
+    {
+        referenceWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createObjectReferenceArray(String objectPath, int size,
+            HDF5IntStorageFeatures features)
+    {
+        referenceWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createObjectReferenceArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features)
+    {
+        referenceWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void writeObjectReferenceArrayBlock(String objectPath, String[] referencedObjectPaths,
+            long blockNumber)
+    {
+        referenceWriter.writeArrayBlock(objectPath, referencedObjectPaths, blockNumber);
+    }
+
+    @Override
+    public void writeObjectReferenceArrayBlockWithOffset(String objectPath,
+            String[] referencedObjectPaths, int dataSize, long offset)
+    {
+        referenceWriter.writeArrayBlockWithOffset(objectPath, referencedObjectPaths, dataSize,
+                offset);
+    }
+
+    @Override
+    public void createObjectReferenceMDArray(String objectPath, int[] dimensions)
+    {
+        referenceWriter.createMDArray(objectPath, dimensions);
+    }
+
+    @Override
+    public void createObjectReferenceMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions)
+    {
+        referenceWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createObjectReferenceMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features)
+    {
+        referenceWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createObjectReferenceMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features)
+    {
+        referenceWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void writeObjectReferenceMDArrayBlock(String objectPath,
+            MDArray<String> referencedObjectPaths, long[] blockNumber)
+    {
+        referenceWriter.writeMDArrayBlock(objectPath, referencedObjectPaths, blockNumber);
+    }
+
+    @Override
+    public void writeObjectReferenceMDArrayBlockWithOffset(String objectPath,
+            MDArray<String> referencedObjectPaths, long[] offset)
+    {
+        referenceWriter.writeMDArrayBlockWithOffset(objectPath, referencedObjectPaths, offset);
+    }
+
+    @Override
+    public void writeObjectReferenceMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        referenceWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    //
+    // String
+    //
+
+    @Override
+    public IHDF5StringWriter string()
+    {
+        return stringWriter;
+    }
+
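+    // Usage sketch (illustrative path):
+    //
+    //     writer.writeString("/greeting", "hello");
+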
+    @Override
+    public void createStringArray(String objectPath, int maxLength, int size)
+    {
+        stringWriter.createArray(objectPath, maxLength, size);
+    }
+
+    @Override
+    public void createStringArray(String objectPath, int maxLength, long size, int blockSize)
+    {
+        stringWriter.createArray(objectPath, maxLength, size, blockSize);
+    }
+
+    @Override
+    public void createStringArray(String objectPath, int maxLength, int size,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createArray(objectPath, maxLength, size, features);
+    }
+
+    @Override
+    public void createStringArray(String objectPath, int maxLength, long size, int blockSize,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createArray(objectPath, maxLength, size, blockSize, features);
+    }
+
+    @Override
+    public void createStringVariableLengthArray(String objectPath, int size)
+    {
+        stringWriter.createArrayVL(objectPath, size);
+    }
+
+    @Override
+    public void createStringVariableLengthArray(String objectPath, long size, int blockSize)
+    {
+        stringWriter.createArrayVL(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createStringVariableLengthArray(String objectPath, long size, int blockSize,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createArrayVL(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createStringVariableLengthArray(String objectPath, int size,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createArrayVL(objectPath, size, features);
+    }
+
+    @Override
+    public void setStringAttribute(String objectPath, String name, String value)
+    {
+        stringWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setStringAttribute(String objectPath, String name, String value, int maxLength)
+    {
+        stringWriter.setAttr(objectPath, name, value, maxLength);
+    }
+
+    @Override
+    public void setStringArrayAttribute(String objectPath, String name, String[] value,
+            int maxLength)
+    {
+        stringWriter.setArrayAttr(objectPath, name, value, maxLength);
+    }
+
+    @Override
+    public void setStringArrayAttribute(String objectPath, String name, String[] value)
+    {
+        stringWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setStringMDArrayAttribute(String objectPath, String name, MDArray<String> value)
+    {
+        stringWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setStringMDArrayAttribute(String objectPath, String name, MDArray<String> value,
+            int maxLength)
+    {
+        stringWriter.setMDArrayAttr(objectPath, name, value, maxLength);
+    }
+
+    @Override
+    public void setStringAttributeVariableLength(String objectPath, String name, String value)
+    {
+        stringWriter.setAttrVL(objectPath, name, value);
+    }
+
+    @Override
+    public void writeString(String objectPath, String data, int maxLength)
+    {
+        stringWriter.write(objectPath, data, maxLength);
+    }
+
+    @Override
+    public void writeString(String objectPath, String data)
+    {
+        stringWriter.write(objectPath, data);
+    }
+
+    @Override
+    public void writeString(String objectPath, String data, HDF5GenericStorageFeatures features)
+    {
+        stringWriter.write(objectPath, data, features);
+    }
+
+    @Override
+    public void writeString(String objectPath, String data, int maxLength,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.write(objectPath, data, maxLength, features);
+    }
+
+    @Override
+    public void writeStringArray(String objectPath, String[] data,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeStringArray(String objectPath, String[] data)
+    {
+        stringWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeStringArray(String objectPath, String[] data, int maxLength)
+    {
+        stringWriter.writeArray(objectPath, data, maxLength);
+    }
+
+    @Override
+    public void writeStringArray(String objectPath, String[] data, int maxLength,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.writeArray(objectPath, data, maxLength, features);
+    }
+
+    @Override
+    public void createStringMDArray(String objectPath, int maxLength, int[] dimensions,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createMDArray(objectPath, maxLength, dimensions, features);
+    }
+
+    @Override
+    public void createStringMDArray(String objectPath, int maxLength, int[] dimensions)
+    {
+        stringWriter.createMDArray(objectPath, maxLength, dimensions);
+    }
+
+    @Override
+    public void createStringMDArray(String objectPath, int maxLength, long[] dimensions,
+            int[] blockSize, HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createMDArray(objectPath, maxLength, dimensions, blockSize, features);
+    }
+
+    @Override
+    public void createStringMDArray(String objectPath, int maxLength, long[] dimensions,
+            int[] blockSize)
+    {
+        stringWriter.createMDArray(objectPath, maxLength, dimensions, blockSize);
+    }
+
+    @Override
+    public void writeStringMDArray(String objectPath, MDArray<String> data, int maxLength)
+    {
+        stringWriter.writeMDArray(objectPath, data, maxLength);
+    }
+
+    @Override
+    public void writeStringMDArray(String objectPath, MDArray<String> data)
+    {
+        stringWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeStringMDArray(String objectPath, MDArray<String> data,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeStringMDArray(String objectPath, MDArray<String> data, int maxLength,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.writeMDArray(objectPath, data, maxLength, features);
+    }
+
+    @Override
+    public void writeStringArrayBlock(String objectPath, String[] data, long blockNumber)
+    {
+        stringWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeStringArrayBlockWithOffset(String objectPath, String[] data, int dataSize,
+            long offset)
+    {
+        stringWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeStringMDArrayBlock(String objectPath, MDArray<String> data, long[] blockNumber)
+    {
+        stringWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeStringMDArrayBlockWithOffset(String objectPath, MDArray<String> data,
+            long[] offset)
+    {
+        stringWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeStringVariableLength(String objectPath, String data)
+    {
+        stringWriter.writeVL(objectPath, data);
+    }
+
+    @Override
+    public void writeStringVariableLengthArray(String objectPath, String[] data)
+    {
+        stringWriter.writeArrayVL(objectPath, data);
+    }
+
+    @Override
+    public void writeStringVariableLengthArray(String objectPath, String[] data,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.writeArrayVL(objectPath, data, features);
+    }
+
+    @Override
+    public void writeStringVariableLengthMDArray(String objectPath, MDArray<String> data,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.writeMDArrayVL(objectPath, data, features);
+    }
+
+    @Override
+    public void writeStringVariableLengthMDArray(String objectPath, MDArray<String> data)
+    {
+        stringWriter.writeMDArrayVL(objectPath, data);
+    }
+
+    @Override
+    public void createStringVariableLengthMDArray(String objectPath, int[] dimensions,
+            HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createMDArrayVL(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createStringVariableLengthMDArray(String objectPath, int[] dimensions)
+    {
+        stringWriter.createMDArrayVL(objectPath, dimensions);
+    }
+
+    @Override
+    public void createStringVariableLengthMDArray(String objectPath, long[] dimensions,
+            int[] blockSize, HDF5GenericStorageFeatures features)
+    {
+        stringWriter.createMDArrayVL(objectPath, dimensions, blockSize, features);
+    }
+
+    @Override
+    public void createStringVariableLengthMDArray(String objectPath, long[] dimensions,
+            int[] blockSize)
+    {
+        stringWriter.createMDArrayVL(objectPath, dimensions, blockSize);
+    }
+
+    //
+    // Enum
+    //
+
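+    // Usage sketch (illustrative; writes a value of an anonymous enum type built
+    // from the given option strings):
+    //
+    //     writer.writeEnum("/color", new String[] { "RED", "GREEN", "BLUE" }, "GREEN");
+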
+    @Override
+    public IHDF5EnumWriter enums()
+    {
+        return enumWriter;
+    }
+
+    @Override
+    public IHDF5EnumWriter enumeration()
+    {
+        return enumWriter;
+    }
+
+    @Override
+    public HDF5EnumerationType getEnumType(final String name, final String[] values)
+            throws HDF5JavaException
+    {
+        return enumWriter.getType(name, values);
+    }
+
+    @Override
+    public HDF5EnumerationType getEnumType(final String name, final String[] values,
+            final boolean check) throws HDF5JavaException
+    {
+        return enumWriter.getType(name, values, check);
+    }
+
+    @Override
+    public HDF5EnumerationType createEnumArray(String objectPath, HDF5EnumerationType enumType,
+            int size)
+    {
+        return enumWriter.createArray(objectPath, enumType, size);
+    }
+
+    @Override
+    public HDF5EnumerationType createEnumArray(String objectPath, HDF5EnumerationType enumType,
+            long size, HDF5IntStorageFeatures features)
+    {
+        return enumWriter.createArray(objectPath, enumType, size, features);
+    }
+
+    @Override
+    public HDF5EnumerationType createEnumArray(String objectPath, HDF5EnumerationType enumType,
+            long size, int blockSize, HDF5IntStorageFeatures features)
+    {
+        return enumWriter.createArray(objectPath, enumType, size, blockSize, features);
+    }
+
+    @Override
+    public HDF5EnumerationType createEnumArray(String objectPath, HDF5EnumerationType enumType,
+            long size, int blockSize)
+    {
+        return enumWriter.createArray(objectPath, enumType, size, blockSize);
+    }
+
+    @Override
+    public void setEnumAttribute(String objectPath, String name, HDF5EnumerationValue value)
+    {
+        enumWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setEnumAttribute(String objectPath, String name, Enum<?> value)
+            throws HDF5JavaException
+    {
+        enumWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public <T extends Enum<T>> void writeEnum(String objectPath, Enum<T> value)
+            throws HDF5JavaException
+    {
+        enumWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeEnum(String objectPath, String[] options, String value)
+    {
+        enumWriter.write(objectPath, enumWriter.newAnonVal(options, value));
+    }
+
+    @Override
+    public <T extends Enum<T>> void writeEnumArray(String objectPath, Enum<T>[] data)
+    {
+        enumWriter.writeArray(objectPath, enumWriter.newAnonArray(data));
+    }
+
+    @Override
+    public void writeEnumArray(String objectPath, String[] options, String[] data)
+    {
+        enumWriter.writeArray(objectPath, enumWriter.newAnonArray(options, data));
+    }
+
+    @Override
+    public void setEnumArrayAttribute(String objectPath, String name,
+            HDF5EnumerationValueArray value)
+    {
+        enumWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeEnum(String objectPath, HDF5EnumerationValue value) throws HDF5JavaException
+    {
+        enumWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeEnumArray(String objectPath, HDF5EnumerationValueArray data,
+            HDF5IntStorageFeatures features) throws HDF5JavaException
+    {
+        enumWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeEnumArray(String objectPath, HDF5EnumerationValueArray data)
+            throws HDF5JavaException
+    {
+        enumWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeEnumArrayBlock(String objectPath, HDF5EnumerationValueArray data,
+            long blockNumber)
+    {
+        enumWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeEnumArrayBlockWithOffset(String objectPath, HDF5EnumerationValueArray data,
+            int dataSize, long offset)
+    {
+        enumWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    //
+    // Compound
+    //
+
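+    // Usage sketch (illustrative; Point is a hypothetical POJO whose fields are
+    // mapped by inference):
+    //
+    //     HDF5CompoundType<Point> type = writer.getInferredCompoundType(Point.class);
+    //     writer.writeCompound("/point", type, new Point(1, 2));
+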
+    @Override
+    public IHDF5CompoundWriter compounds()
+    {
+        return compoundWriter;
+    }
+
+    @Override
+    public IHDF5CompoundWriter compound()
+    {
+        return compoundWriter;
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getCompoundType(final String name, Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members)
+    {
+        return compoundWriter.getType(name, pojoClass, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getCompoundType(Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members)
+    {
+        return compoundWriter.getType(pojoClass, members);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final String name, Class<T> pojoClass)
+    {
+        return compoundWriter.getInferredType(name, pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(Class<T> pojoClass)
+    {
+        return compoundWriter.getInferredType(pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final String name, T template)
+    {
+        return compoundWriter.getInferredType(name, template);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getInferredCompoundType(T template)
+    {
+        return compoundWriter.getInferredType(template);
+    }
+
+    @Override
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, int size)
+    {
+        compoundWriter.createArray(objectPath, type, size);
+    }
+
+    @Override
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, long size,
+            HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.createArray(objectPath, type, size, features);
+    }
+
+    @Override
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, long size,
+            int blockSize, HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.createArray(objectPath, type, size, blockSize, features);
+    }
+
+    @Override
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, long size,
+            int blockSize)
+    {
+        compoundWriter.createArray(objectPath, type, size, blockSize);
+    }
+
+    @Override
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            int[] dimensions, HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.createMDArray(objectPath, type, dimensions, features);
+    }
+
+    @Override
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            int[] dimensions)
+    {
+        compoundWriter.createMDArray(objectPath, type, dimensions);
+    }
+
+    @Override
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            long[] dimensions, int[] blockDimensions, HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.createMDArray(objectPath, type, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            long[] dimensions, int[] blockDimensions)
+    {
+        compoundWriter.createMDArray(objectPath, type, dimensions, blockDimensions);
+    }
+
+    @Override
+    public <T> void writeCompound(String objectPath, HDF5CompoundType<T> type, T data,
+            IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.write(objectPath, type, data, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompound(String objectPath, HDF5CompoundType<T> type, T data)
+    {
+        compoundWriter.write(objectPath, type, data);
+    }
+
+    @Override
+    public <T> void writeCompound(String objectPath, T data)
+    {
+        compoundWriter.write(objectPath, data);
+    }
+
+    @Override
+    public <T> void writeCompoundArray(String objectPath, HDF5CompoundType<T> type, T[] data,
+            HDF5GenericStorageFeatures features, IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeArray(objectPath, type, data, features, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundArray(String objectPath, HDF5CompoundType<T> type, T[] data,
+            HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.writeArray(objectPath, type, data, features);
+    }
+
+    @Override
+    public <T> void writeCompoundArray(String objectPath, HDF5CompoundType<T> type, T[] data)
+    {
+        compoundWriter.writeArray(objectPath, type, data);
+    }
+
+    @Override
+    public <T> void writeCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type, T[] data,
+            long blockNumber, IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeArrayBlock(objectPath, type, data, blockNumber, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type, T[] data,
+            long blockNumber)
+    {
+        compoundWriter.writeArrayBlock(objectPath, type, data, blockNumber);
+    }
+
+    @Override
+    public <T> void writeCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            T[] data, long offset, IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeArrayBlockWithOffset(objectPath, type, data, offset, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            T[] data, long offset)
+    {
+        compoundWriter.writeArrayBlockWithOffset(objectPath, type, data, offset);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, HDF5GenericStorageFeatures features,
+            IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeMDArray(objectPath, type, data, features, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.writeMDArray(objectPath, type, data, features);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data)
+    {
+        compoundWriter.writeMDArray(objectPath, type, data);
+    }
+
+    @Override
+    public <T> void writeCompoundArray(String objectPath, T[] data)
+    {
+        compoundWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public <T> void writeCompoundArray(String objectPath, T[] data,
+            HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArray(String objectPath, MDArray<T> data)
+    {
+        compoundWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArray(String objectPath, MDArray<T> data,
+            HDF5GenericStorageFeatures features)
+    {
+        compoundWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, long[] blockDimensions, IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeMDArrayBlock(objectPath, type, data, blockDimensions, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, long[] blockDimensions)
+    {
+        compoundWriter.writeMDArrayBlock(objectPath, type, data, blockDimensions);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, int[] blockDimensions, long[] offset,
+            int[] memoryOffset, IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeMDArrayBlockWithOffset(objectPath, type, data, blockDimensions, offset,
+                memoryOffset, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, int[] blockDimensions, long[] offset,
+            int[] memoryOffset)
+    {
+        compoundWriter.writeMDArrayBlockWithOffset(objectPath, type, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, long[] offset,
+            IByteArrayInspector inspectorOrNull)
+    {
+        compoundWriter.writeMDArrayBlockWithOffset(objectPath, type, data, offset, inspectorOrNull);
+    }
+
+    @Override
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, long[] offset)
+    {
+        compoundWriter.writeMDArrayBlockWithOffset(objectPath, type, data, offset);
+    }
+
+    @Override
+    public <T> HDF5CompoundMemberInformation[] getCompoundMemberInformation(Class<T> compoundClass)
+    {
+        return compoundWriter.getMemberInfo(compoundClass);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getCompoundMemberInformation(String dataTypeName)
+    {
+        return compoundWriter.getMemberInfo(dataTypeName);
+    }
+
+    @Override
+    public HDF5CompoundMemberInformation[] getCompoundDataSetInformation(String dataSetPath)
+            throws HDF5JavaException
+    {
+        return compoundWriter.getDataSetInfo(dataSetPath);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredCompoundType(String name, List<String> memberNames,
+            List<?> template)
+    {
+        return compoundWriter.getInferredType(name, memberNames, template);
+    }
+
+    @Override
+    public HDF5CompoundType<List<?>> getInferredCompoundType(List<String> memberNames,
+            List<?> template)
+    {
+        return compoundWriter.getInferredType(memberNames, template);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredCompoundType(String name, String[] memberNames,
+            Object[] template)
+    {
+        return compoundWriter.getInferredType(name, memberNames, template);
+    }
+
+    @Override
+    public HDF5CompoundType<Object[]> getInferredCompoundType(String[] memberNames,
+            Object[] template)
+    {
+        return compoundWriter.getInferredType(memberNames, template);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getDataSetCompoundType(String objectPath, Class<T> pojoClass)
+    {
+        return compoundWriter.getDataSetType(objectPath, pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedCompoundType(String dataTypeName, Class<T> pojoClass)
+    {
+        return compoundWriter.getNamedType(dataTypeName, pojoClass);
+    }
+
+    @Override
+    public <T> HDF5CompoundType<T> getNamedCompoundType(Class<T> pojoClass)
+    {
+        return compoundWriter.getNamedType(pojoClass);
+    }
+
+    // ------------------------------------------------------------------------------
+    // Primitive types - START
+    // ------------------------------------------------------------------------------
+
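+    // Usage sketch (illustrative path; byte shown here, and the same pattern
+    // repeats for each primitive type below):
+    //
+    //     writer.writeByteArray("/bytes", new byte[] { 1, 2, 3 });
+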
+    @Override
+    public void createByteArray(String objectPath, int blockSize)
+    {
+        byteWriter.createArray(objectPath, blockSize);
+    }
+
+    @Override
+    public void createByteArray(String objectPath, long size, int blockSize)
+    {
+        byteWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createByteArray(String objectPath, int size, HDF5IntStorageFeatures features)
+    {
+        byteWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createByteArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features)
+    {
+        byteWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createByteMDArray(String objectPath, int[] blockDimensions)
+    {
+        byteWriter.createMDArray(objectPath, blockDimensions);
+    }
+
+    @Override
+    public void createByteMDArray(String objectPath, long[] dimensions, int[] blockDimensions)
+    {
+        byteWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createByteMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features)
+    {
+        byteWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createByteMDArray(String objectPath, long[] dimensions, int[] blockDimensions,
+            HDF5IntStorageFeatures features)
+    {
+        byteWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void createByteMatrix(String objectPath, int blockSizeX, int blockSizeY)
+    {
+        byteWriter.createMatrix(objectPath, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createByteMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY)
+    {
+        byteWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createByteMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY, HDF5IntStorageFeatures features)
+    {
+        byteWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY, features);
+    }
+
+    @Override
+    public void setByteArrayAttribute(String objectPath, String name, byte[] value)
+    {
+        byteWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setByteAttribute(String objectPath, String name, byte value)
+    {
+        byteWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setByteMDArrayAttribute(String objectPath, String name, MDByteArray value)
+    {
+        byteWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setByteMatrixAttribute(String objectPath, String name, byte[][] value)
+    {
+        byteWriter.setMatrixAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeByte(String objectPath, byte value)
+    {
+        byteWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeByteArray(String objectPath, byte[] data)
+    {
+        byteWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeByteArray(String objectPath, byte[] data, HDF5IntStorageFeatures features)
+    {
+        byteWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeByteArrayBlock(String objectPath, byte[] data, long blockNumber)
+    {
+        byteWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeByteArrayBlockWithOffset(String objectPath, byte[] data, int dataSize,
+            long offset)
+    {
+        byteWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeByteMDArray(String objectPath, MDByteArray data)
+    {
+        byteWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeByteMDArray(String objectPath, MDByteArray data,
+            HDF5IntStorageFeatures features)
+    {
+        byteWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeByteMDArrayBlock(String objectPath, MDByteArray data, long[] blockNumber)
+    {
+        byteWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeByteMDArrayBlockWithOffset(String objectPath, MDByteArray data, long[] offset)
+    {
+        byteWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeByteMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        byteWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public void writeByteMatrix(String objectPath, byte[][] data)
+    {
+        byteWriter.writeMatrix(objectPath, data);
+    }
+
+    @Override
+    public void writeByteMatrix(String objectPath, byte[][] data, HDF5IntStorageFeatures features)
+    {
+        byteWriter.writeMatrix(objectPath, data, features);
+    }
+
+    @Override
+    public void writeByteMatrixBlock(String objectPath, byte[][] data, long blockNumberX,
+            long blockNumberY)
+    {
+        byteWriter.writeMatrixBlock(objectPath, data, blockNumberX, blockNumberY);
+    }
+
+    @Override
+    public void writeByteMatrixBlockWithOffset(String objectPath, byte[][] data, long offsetX,
+            long offsetY)
+    {
+        byteWriter.writeMatrixBlockWithOffset(objectPath, data, offsetX, offsetY);
+    }
+
+    @Override
+    public void writeByteMatrixBlockWithOffset(String objectPath, byte[][] data, int dataSizeX,
+            int dataSizeY, long offsetX, long offsetY)
+    {
+        byteWriter.writeMatrixBlockWithOffset(objectPath, data, dataSizeX, dataSizeY, offsetX,
+                offsetY);
+    }
+
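+    // Block-wise writing pattern shared by the primitive writers (illustrative
+    // path and sizes; each written block must match the block size given at
+    // creation time):
+    //
+    //     writer.createDoubleArray("/d", 1000L, 100);       // total size, block size
+    //     writer.writeDoubleArrayBlock("/d", block100, 0);  // block100.length == 100
+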
+    @Override
+    public void createDoubleArray(String objectPath, int blockSize)
+    {
+        doubleWriter.createArray(objectPath, blockSize);
+    }
+
+    @Override
+    public void createDoubleArray(String objectPath, long size, int blockSize)
+    {
+        doubleWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createDoubleArray(String objectPath, int size, HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createDoubleArray(String objectPath, long size, int blockSize,
+            HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createDoubleMDArray(String objectPath, int[] blockDimensions)
+    {
+        doubleWriter.createMDArray(objectPath, blockDimensions);
+    }
+
+    @Override
+    public void createDoubleMDArray(String objectPath, long[] dimensions, int[] blockDimensions)
+    {
+        doubleWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createDoubleMDArray(String objectPath, int[] dimensions,
+            HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createDoubleMDArray(String objectPath, long[] dimensions, int[] blockDimensions,
+            HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void createDoubleMatrix(String objectPath, int blockSizeX, int blockSizeY)
+    {
+        doubleWriter.createMatrix(objectPath, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createDoubleMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY)
+    {
+        doubleWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createDoubleMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY, HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY, features);
+    }
+
+    @Override
+    public void setDoubleArrayAttribute(String objectPath, String name, double[] value)
+    {
+        doubleWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setDoubleAttribute(String objectPath, String name, double value)
+    {
+        doubleWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setDoubleMDArrayAttribute(String objectPath, String name, MDDoubleArray value)
+    {
+        doubleWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setDoubleMatrixAttribute(String objectPath, String name, double[][] value)
+    {
+        doubleWriter.setMatrixAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeDouble(String objectPath, double value)
+    {
+        doubleWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeDoubleArray(String objectPath, double[] data)
+    {
+        doubleWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeDoubleArray(String objectPath, double[] data, HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeDoubleArrayBlock(String objectPath, double[] data, long blockNumber)
+    {
+        doubleWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeDoubleArrayBlockWithOffset(String objectPath, double[] data, int dataSize,
+            long offset)
+    {
+        doubleWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeDoubleMDArray(String objectPath, MDDoubleArray data)
+    {
+        doubleWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeDoubleMDArray(String objectPath, MDDoubleArray data,
+            HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeDoubleMDArrayBlock(String objectPath, MDDoubleArray data, long[] blockNumber)
+    {
+        doubleWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeDoubleMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            long[] offset)
+    {
+        doubleWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeDoubleMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        doubleWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public void writeDoubleMatrix(String objectPath, double[][] data)
+    {
+        doubleWriter.writeMatrix(objectPath, data);
+    }
+
+    @Override
+    public void writeDoubleMatrix(String objectPath, double[][] data,
+            HDF5FloatStorageFeatures features)
+    {
+        doubleWriter.writeMatrix(objectPath, data, features);
+    }
+
+    @Override
+    public void writeDoubleMatrixBlock(String objectPath, double[][] data, long blockNumberX,
+            long blockNumberY)
+    {
+        doubleWriter.writeMatrixBlock(objectPath, data, blockNumberX, blockNumberY);
+    }
+
+    @Override
+    public void writeDoubleMatrixBlockWithOffset(String objectPath, double[][] data, long offsetX,
+            long offsetY)
+    {
+        doubleWriter.writeMatrixBlockWithOffset(objectPath, data, offsetX, offsetY);
+    }
+
+    @Override
+    public void writeDoubleMatrixBlockWithOffset(String objectPath, double[][] data, int dataSizeX,
+            int dataSizeY, long offsetX, long offsetY)
+    {
+        doubleWriter.writeMatrixBlockWithOffset(objectPath, data, dataSizeX, dataSizeY, offsetX,
+                offsetY);
+    }
+
+    @Override
+    public void createFloatArray(String objectPath, int blockSize)
+    {
+        floatWriter.createArray(objectPath, blockSize);
+    }
+
+    @Override
+    public void createFloatArray(String objectPath, long size, int blockSize)
+    {
+        floatWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createFloatArray(String objectPath, int size, HDF5FloatStorageFeatures features)
+    {
+        floatWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createFloatArray(String objectPath, long size, int blockSize,
+            HDF5FloatStorageFeatures features)
+    {
+        floatWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createFloatMDArray(String objectPath, int[] blockDimensions)
+    {
+        floatWriter.createMDArray(objectPath, blockDimensions);
+    }
+
+    @Override
+    public void createFloatMDArray(String objectPath, long[] dimensions, int[] blockDimensions)
+    {
+        floatWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createFloatMDArray(String objectPath, int[] dimensions,
+            HDF5FloatStorageFeatures features)
+    {
+        floatWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createFloatMDArray(String objectPath, long[] dimensions, int[] blockDimensions,
+            HDF5FloatStorageFeatures features)
+    {
+        floatWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void createFloatMatrix(String objectPath, int blockSizeX, int blockSizeY)
+    {
+        floatWriter.createMatrix(objectPath, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createFloatMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY)
+    {
+        floatWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createFloatMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY, HDF5FloatStorageFeatures features)
+    {
+        floatWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY, features);
+    }
+
+    @Override
+    public void setFloatArrayAttribute(String objectPath, String name, float[] value)
+    {
+        floatWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setFloatAttribute(String objectPath, String name, float value)
+    {
+        floatWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setFloatMDArrayAttribute(String objectPath, String name, MDFloatArray value)
+    {
+        floatWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setFloatMatrixAttribute(String objectPath, String name, float[][] value)
+    {
+        floatWriter.setMatrixAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeFloat(String objectPath, float value)
+    {
+        floatWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeFloatArray(String objectPath, float[] data)
+    {
+        floatWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeFloatArray(String objectPath, float[] data, HDF5FloatStorageFeatures features)
+    {
+        floatWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeFloatArrayBlock(String objectPath, float[] data, long blockNumber)
+    {
+        floatWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeFloatArrayBlockWithOffset(String objectPath, float[] data, int dataSize,
+            long offset)
+    {
+        floatWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeFloatMDArray(String objectPath, MDFloatArray data)
+    {
+        floatWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeFloatMDArray(String objectPath, MDFloatArray data,
+            HDF5FloatStorageFeatures features)
+    {
+        floatWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeFloatMDArrayBlock(String objectPath, MDFloatArray data, long[] blockNumber)
+    {
+        floatWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeFloatMDArrayBlockWithOffset(String objectPath, MDFloatArray data, long[] offset)
+    {
+        floatWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeFloatMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        floatWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public void writeFloatMatrix(String objectPath, float[][] data)
+    {
+        floatWriter.writeMatrix(objectPath, data);
+    }
+
+    @Override
+    public void writeFloatMatrix(String objectPath, float[][] data,
+            HDF5FloatStorageFeatures features)
+    {
+        floatWriter.writeMatrix(objectPath, data, features);
+    }
+
+    @Override
+    public void writeFloatMatrixBlock(String objectPath, float[][] data, long blockNumberX,
+            long blockNumberY)
+    {
+        floatWriter.writeMatrixBlock(objectPath, data, blockNumberX, blockNumberY);
+    }
+
+    @Override
+    public void writeFloatMatrixBlockWithOffset(String objectPath, float[][] data, long offsetX,
+            long offsetY)
+    {
+        floatWriter.writeMatrixBlockWithOffset(objectPath, data, offsetX, offsetY);
+    }
+
+    @Override
+    public void writeFloatMatrixBlockWithOffset(String objectPath, float[][] data, int dataSizeX,
+            int dataSizeY, long offsetX, long offsetY)
+    {
+        floatWriter.writeMatrixBlockWithOffset(objectPath, data, dataSizeX, dataSizeY, offsetX,
+                offsetY);
+    }
+
+    @Override
+    public void createIntArray(String objectPath, int blockSize)
+    {
+        intWriter.createArray(objectPath, blockSize);
+    }
+
+    @Override
+    public void createIntArray(String objectPath, long size, int blockSize)
+    {
+        intWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createIntArray(String objectPath, int size, HDF5IntStorageFeatures features)
+    {
+        intWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createIntArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features)
+    {
+        intWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createIntMDArray(String objectPath, int[] blockDimensions)
+    {
+        intWriter.createMDArray(objectPath, blockDimensions);
+    }
+
+    @Override
+    public void createIntMDArray(String objectPath, long[] dimensions, int[] blockDimensions)
+    {
+        intWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createIntMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features)
+    {
+        intWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createIntMDArray(String objectPath, long[] dimensions, int[] blockDimensions,
+            HDF5IntStorageFeatures features)
+    {
+        intWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void createIntMatrix(String objectPath, int blockSizeX, int blockSizeY)
+    {
+        intWriter.createMatrix(objectPath, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createIntMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY)
+    {
+        intWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createIntMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY, HDF5IntStorageFeatures features)
+    {
+        intWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY, features);
+    }
+
+    @Override
+    public void setIntArrayAttribute(String objectPath, String name, int[] value)
+    {
+        intWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setIntAttribute(String objectPath, String name, int value)
+    {
+        intWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setIntMDArrayAttribute(String objectPath, String name, MDIntArray value)
+    {
+        intWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setIntMatrixAttribute(String objectPath, String name, int[][] value)
+    {
+        intWriter.setMatrixAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeInt(String objectPath, int value)
+    {
+        intWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeIntArray(String objectPath, int[] data)
+    {
+        intWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeIntArray(String objectPath, int[] data, HDF5IntStorageFeatures features)
+    {
+        intWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeIntArrayBlock(String objectPath, int[] data, long blockNumber)
+    {
+        intWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeIntArrayBlockWithOffset(String objectPath, int[] data, int dataSize,
+            long offset)
+    {
+        intWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeIntMDArray(String objectPath, MDIntArray data)
+    {
+        intWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeIntMDArray(String objectPath, MDIntArray data, HDF5IntStorageFeatures features)
+    {
+        intWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeIntMDArrayBlock(String objectPath, MDIntArray data, long[] blockNumber)
+    {
+        intWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeIntMDArrayBlockWithOffset(String objectPath, MDIntArray data, long[] offset)
+    {
+        intWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeIntMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        intWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public void writeIntMatrix(String objectPath, int[][] data)
+    {
+        intWriter.writeMatrix(objectPath, data);
+    }
+
+    @Override
+    public void writeIntMatrix(String objectPath, int[][] data, HDF5IntStorageFeatures features)
+    {
+        intWriter.writeMatrix(objectPath, data, features);
+    }
+
+    @Override
+    public void writeIntMatrixBlock(String objectPath, int[][] data, long blockNumberX,
+            long blockNumberY)
+    {
+        intWriter.writeMatrixBlock(objectPath, data, blockNumberX, blockNumberY);
+    }
+
+    @Override
+    public void writeIntMatrixBlockWithOffset(String objectPath, int[][] data, long offsetX,
+            long offsetY)
+    {
+        intWriter.writeMatrixBlockWithOffset(objectPath, data, offsetX, offsetY);
+    }
+
+    @Override
+    public void writeIntMatrixBlockWithOffset(String objectPath, int[][] data, int dataSizeX,
+            int dataSizeY, long offsetX, long offsetY)
+    {
+        intWriter.writeMatrixBlockWithOffset(objectPath, data, dataSizeX, dataSizeY, offsetX,
+                offsetY);
+    }
+
+    @Override
+    public void createLongArray(String objectPath, int blockSize)
+    {
+        longWriter.createArray(objectPath, blockSize);
+    }
+
+    @Override
+    public void createLongArray(String objectPath, long size, int blockSize)
+    {
+        longWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createLongArray(String objectPath, int size, HDF5IntStorageFeatures features)
+    {
+        longWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createLongArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features)
+    {
+        longWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createLongMDArray(String objectPath, int[] blockDimensions)
+    {
+        longWriter.createMDArray(objectPath, blockDimensions);
+    }
+
+    @Override
+    public void createLongMDArray(String objectPath, long[] dimensions, int[] blockDimensions)
+    {
+        longWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createLongMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features)
+    {
+        longWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createLongMDArray(String objectPath, long[] dimensions, int[] blockDimensions,
+            HDF5IntStorageFeatures features)
+    {
+        longWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void createLongMatrix(String objectPath, int blockSizeX, int blockSizeY)
+    {
+        longWriter.createMatrix(objectPath, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createLongMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY)
+    {
+        longWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createLongMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY, HDF5IntStorageFeatures features)
+    {
+        longWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY, features);
+    }
+
+    @Override
+    public void setLongArrayAttribute(String objectPath, String name, long[] value)
+    {
+        longWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setLongAttribute(String objectPath, String name, long value)
+    {
+        longWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setLongMDArrayAttribute(String objectPath, String name, MDLongArray value)
+    {
+        longWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setLongMatrixAttribute(String objectPath, String name, long[][] value)
+    {
+        longWriter.setMatrixAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeLong(String objectPath, long value)
+    {
+        longWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeLongArray(String objectPath, long[] data)
+    {
+        longWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeLongArray(String objectPath, long[] data, HDF5IntStorageFeatures features)
+    {
+        longWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeLongArrayBlock(String objectPath, long[] data, long blockNumber)
+    {
+        longWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeLongArrayBlockWithOffset(String objectPath, long[] data, int dataSize,
+            long offset)
+    {
+        longWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeLongMDArray(String objectPath, MDLongArray data)
+    {
+        longWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeLongMDArray(String objectPath, MDLongArray data,
+            HDF5IntStorageFeatures features)
+    {
+        longWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeLongMDArrayBlock(String objectPath, MDLongArray data, long[] blockNumber)
+    {
+        longWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeLongMDArrayBlockWithOffset(String objectPath, MDLongArray data, long[] offset)
+    {
+        longWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeLongMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        longWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public void writeLongMatrix(String objectPath, long[][] data)
+    {
+        longWriter.writeMatrix(objectPath, data);
+    }
+
+    @Override
+    public void writeLongMatrix(String objectPath, long[][] data, HDF5IntStorageFeatures features)
+    {
+        longWriter.writeMatrix(objectPath, data, features);
+    }
+
+    @Override
+    public void writeLongMatrixBlock(String objectPath, long[][] data, long blockNumberX,
+            long blockNumberY)
+    {
+        longWriter.writeMatrixBlock(objectPath, data, blockNumberX, blockNumberY);
+    }
+
+    @Override
+    public void writeLongMatrixBlockWithOffset(String objectPath, long[][] data, long offsetX,
+            long offsetY)
+    {
+        longWriter.writeMatrixBlockWithOffset(objectPath, data, offsetX, offsetY);
+    }
+
+    @Override
+    public void writeLongMatrixBlockWithOffset(String objectPath, long[][] data, int dataSizeX,
+            int dataSizeY, long offsetX, long offsetY)
+    {
+        longWriter.writeMatrixBlockWithOffset(objectPath, data, dataSizeX, dataSizeY, offsetX,
+                offsetY);
+    }
+
+    @Override
+    public void createShortArray(String objectPath, int blockSize)
+    {
+        shortWriter.createArray(objectPath, blockSize);
+    }
+
+    @Override
+    public void createShortArray(String objectPath, long size, int blockSize)
+    {
+        shortWriter.createArray(objectPath, size, blockSize);
+    }
+
+    @Override
+    public void createShortArray(String objectPath, int size, HDF5IntStorageFeatures features)
+    {
+        shortWriter.createArray(objectPath, size, features);
+    }
+
+    @Override
+    public void createShortArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features)
+    {
+        shortWriter.createArray(objectPath, size, blockSize, features);
+    }
+
+    @Override
+    public void createShortMDArray(String objectPath, int[] blockDimensions)
+    {
+        shortWriter.createMDArray(objectPath, blockDimensions);
+    }
+
+    @Override
+    public void createShortMDArray(String objectPath, long[] dimensions, int[] blockDimensions)
+    {
+        shortWriter.createMDArray(objectPath, dimensions, blockDimensions);
+    }
+
+    @Override
+    public void createShortMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features)
+    {
+        shortWriter.createMDArray(objectPath, dimensions, features);
+    }
+
+    @Override
+    public void createShortMDArray(String objectPath, long[] dimensions, int[] blockDimensions,
+            HDF5IntStorageFeatures features)
+    {
+        shortWriter.createMDArray(objectPath, dimensions, blockDimensions, features);
+    }
+
+    @Override
+    public void createShortMatrix(String objectPath, int blockSizeX, int blockSizeY)
+    {
+        shortWriter.createMatrix(objectPath, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createShortMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY)
+    {
+        shortWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY);
+    }
+
+    @Override
+    public void createShortMatrix(String objectPath, long sizeX, long sizeY, int blockSizeX,
+            int blockSizeY, HDF5IntStorageFeatures features)
+    {
+        shortWriter.createMatrix(objectPath, sizeX, sizeY, blockSizeX, blockSizeY, features);
+    }
+
+    @Override
+    public void setShortArrayAttribute(String objectPath, String name, short[] value)
+    {
+        shortWriter.setArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setShortAttribute(String objectPath, String name, short value)
+    {
+        shortWriter.setAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setShortMDArrayAttribute(String objectPath, String name, MDShortArray value)
+    {
+        shortWriter.setMDArrayAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void setShortMatrixAttribute(String objectPath, String name, short[][] value)
+    {
+        shortWriter.setMatrixAttr(objectPath, name, value);
+    }
+
+    @Override
+    public void writeShort(String objectPath, short value)
+    {
+        shortWriter.write(objectPath, value);
+    }
+
+    @Override
+    public void writeShortArray(String objectPath, short[] data)
+    {
+        shortWriter.writeArray(objectPath, data);
+    }
+
+    @Override
+    public void writeShortArray(String objectPath, short[] data, HDF5IntStorageFeatures features)
+    {
+        shortWriter.writeArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeShortArrayBlock(String objectPath, short[] data, long blockNumber)
+    {
+        shortWriter.writeArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeShortArrayBlockWithOffset(String objectPath, short[] data, int dataSize,
+            long offset)
+    {
+        shortWriter.writeArrayBlockWithOffset(objectPath, data, dataSize, offset);
+    }
+
+    @Override
+    public void writeShortMDArray(String objectPath, MDShortArray data)
+    {
+        shortWriter.writeMDArray(objectPath, data);
+    }
+
+    @Override
+    public void writeShortMDArray(String objectPath, MDShortArray data,
+            HDF5IntStorageFeatures features)
+    {
+        shortWriter.writeMDArray(objectPath, data, features);
+    }
+
+    @Override
+    public void writeShortMDArrayBlock(String objectPath, MDShortArray data, long[] blockNumber)
+    {
+        shortWriter.writeMDArrayBlock(objectPath, data, blockNumber);
+    }
+
+    @Override
+    public void writeShortMDArrayBlockWithOffset(String objectPath, MDShortArray data, long[] offset)
+    {
+        shortWriter.writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeShortMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset)
+    {
+        shortWriter.writeMDArrayBlockWithOffset(objectPath, data, blockDimensions, offset,
+                memoryOffset);
+    }
+
+    @Override
+    public void writeShortMatrix(String objectPath, short[][] data)
+    {
+        shortWriter.writeMatrix(objectPath, data);
+    }
+
+    @Override
+    public void writeShortMatrix(String objectPath, short[][] data, HDF5IntStorageFeatures features)
+    {
+        shortWriter.writeMatrix(objectPath, data, features);
+    }
+
+    @Override
+    public void writeShortMatrixBlock(String objectPath, short[][] data, long blockNumberX,
+            long blockNumberY)
+    {
+        shortWriter.writeMatrixBlock(objectPath, data, blockNumberX, blockNumberY);
+    }
+
+    @Override
+    public void writeShortMatrixBlockWithOffset(String objectPath, short[][] data, long offsetX,
+            long offsetY)
+    {
+        shortWriter.writeMatrixBlockWithOffset(objectPath, data, offsetX, offsetY);
+    }
+
+    @Override
+    public void writeShortMatrixBlockWithOffset(String objectPath, short[][] data, int dataSizeX,
+            int dataSizeY, long offsetX, long offsetY)
+    {
+        shortWriter.writeMatrixBlockWithOffset(objectPath, data, dataSizeX, dataSizeY, offsetX,
+                offsetY);
+    }
+
+    @Override
+    public IHDF5ByteWriter int8()
+    {
+        return byteWriter;
+    }
+
+    @Override
+    public IHDF5ByteWriter uint8()
+    {
+        return ubyteWriter;
+    }
+
+    @Override
+    public IHDF5ShortWriter int16()
+    {
+        return shortWriter;
+    }
+
+    @Override
+    public IHDF5ShortWriter uint16()
+    {
+        return ushortWriter;
+    }
+
+    @Override
+    public IHDF5IntWriter int32()
+    {
+        return intWriter;
+    }
+
+    @Override
+    public IHDF5IntWriter uint32()
+    {
+        return uintWriter;
+    }
+
+    @Override
+    public IHDF5LongWriter int64()
+    {
+        return longWriter;
+    }
+
+    @Override
+    public IHDF5LongWriter uint64()
+    {
+        return ulongWriter;
+    }
+
+    @Override
+    public IHDF5FloatWriter float32()
+    {
+        return floatWriter;
+    }
+
+    @Override
+    public IHDF5DoubleWriter float64()
+    {
+        return doubleWriter;
+    }
+
+    // ------------------------------------------------------------------------------
+    // Primitive Types - END
+    // ------------------------------------------------------------------------------
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/HDF5WriterConfigurator.java b/source/java/ch/systemsx/cisd/hdf5/HDF5WriterConfigurator.java
new file mode 100644
index 0000000..fc592e4
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/HDF5WriterConfigurator.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+import ch.systemsx.cisd.base.utilities.OSUtilities;
+
+/**
+ * The configuration of the writer is done by chaining calls to configuration methods before calling
+ * {@link #writer()}.
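+ * <p>
+ * A minimal configuration sketch (the <code>HDF5Factory.configure()</code> entry point and the
+ * file name are illustrative assumptions; the chained calls are the methods defined in this
+ * class):
+ * 
+ * <pre>
+ * IHDF5Writer writer =
+ *         HDF5Factory.configure("example.h5").overwrite().dontUseExtendableDataTypes().writer();
+ * </pre>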
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5WriterConfigurator extends HDF5ReaderConfigurator implements
+        IHDF5WriterConfigurator
+{
+
+    private boolean useExtendableDataTypes = true;
+
+    private boolean overwriteFile = false;
+
+    private boolean keepDataSetIfExists = false;
+
+    private boolean useSimpleDataSpaceForAttributes = false;
+
+    private FileFormat fileFormat = FileFormat.ALLOW_1_8;
+
+    private String houseKeepingNameSuffix = "";
+
+    // For Windows, use a blocking sync mode by default, as otherwise the mandatory file locks
+    // can cause surprises after the file has been closed.
+    private SyncMode syncMode = OSUtilities.isWindows() ? SyncMode.SYNC_ON_FLUSH_BLOCK
+            : SyncMode.SYNC_ON_FLUSH;
+
+    public HDF5WriterConfigurator(File hdf5File)
+    {
+        super(hdf5File);
+    }
+
+    @Override
+    public HDF5WriterConfigurator overwrite()
+    {
+        this.overwriteFile = true;
+        return this;
+    }
+
+    @Override
+    public HDF5WriterConfigurator keepDataSetsIfTheyExist()
+    {
+        this.keepDataSetIfExists = true;
+        return this;
+    }
+
+    @Override
+    public HDF5WriterConfigurator dontUseExtendableDataTypes()
+    {
+        this.useExtendableDataTypes = false;
+        return this;
+    }
+
+    @Override
+    public HDF5WriterConfigurator useSimpleDataSpaceForAttributes()
+    {
+        this.useSimpleDataSpaceForAttributes = true;
+        return this;
+    }
+
+    @Override
+    public HDF5WriterConfigurator fileFormat(FileFormat newFileFormat)
+    {
+        this.fileFormat = newFileFormat;
+        return this;
+    }
+
+    @Override
+    public HDF5WriterConfigurator syncMode(SyncMode newSyncMode)
+    {
+        this.syncMode = newSyncMode;
+        return this;
+    }
+
+    @Override
+    public IHDF5WriterConfigurator houseKeepingNameSuffix(@SuppressWarnings("hiding")
+    String houseKeepingNameSuffix)
+    {
+        this.houseKeepingNameSuffix = houseKeepingNameSuffix;
+        return this;
+    }
+
+    @Override
+    public HDF5WriterConfigurator performNumericConversions()
+    {
+        return (HDF5WriterConfigurator) super.performNumericConversions();
+    }
+
+    @Override
+    public HDF5WriterConfigurator useUTF8CharacterEncoding()
+    {
+        return (HDF5WriterConfigurator) super.useUTF8CharacterEncoding();
+    }
+
+    @Override
+    public HDF5WriterConfigurator noAutoDereference()
+    {
+        return (HDF5WriterConfigurator) super.noAutoDereference();
+    }
+
+    @Override
+    public IHDF5Writer writer()
+    {
+        if (readerWriterOrNull == null)
+        {
+            readerWriterOrNull =
+                    new HDF5Writer(new HDF5BaseWriter(hdf5File, performNumericConversions,
+                            useUTF8CharEncoding, autoDereference, fileFormat,
+                            useExtendableDataTypes, overwriteFile, keepDataSetIfExists,
+                            useSimpleDataSpaceForAttributes, houseKeepingNameSuffix, syncMode));
+        }
+        return (HDF5Writer) readerWriterOrNull;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5BooleanReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5BooleanReader.java
new file mode 100644
index 0000000..577dc8d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5BooleanReader.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.BitSet;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface that provides methods for reading boolean and bit field values from HDF5 files.
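+ * <p>
+ * A minimal read sketch (the <code>HDF5Factory</code> entry point, the file name and the
+ * <code>bool()</code> accessor used to obtain this interface are assumptions for illustration;
+ * only <code>read()</code> and <code>readBitField()</code> are declared here):
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+ * boolean flag = reader.bool().read("/flag");
+ * BitSet bits = reader.bool().readBitField("/bits");
+ * reader.close();
+ * </pre>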
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5BooleanReader
+{
+
+    /**
+     * Reads a <code>boolean</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @throws HDF5JavaException If the attribute is not a boolean type.
+     */
+    public boolean getAttr(String objectPath, String attributeName) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>boolean</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a boolean type.
+     */
+    public boolean read(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a bit field (which can be considered the equivalent to a boolean array of rank 1) from
+     * the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet readBitField(String objectPath) throws HDF5DatatypeInterfaceException;
+
+    /**
+     * Reads a block of a bit field (which can be considered the equivalent to a boolean array of
+     * rank 1) from the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block (in 64 bit words) to read.
+     * @param blockNumber The number of the block to read.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet readBitFieldBlock(String objectPath, int blockSize, long blockNumber);
+
+    /**
+     * Reads a block of a bit field (which can be considered the equivalent to a boolean array of
+     * rank 1) from the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
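+     * <p>
+     * Illustrative sketch (<code>reader</code> is an instance of this interface; the path and
+     * sizes are invented): reading with an offset that is a multiple of the block size is
+     * equivalent to reading by block number:
+     * 
+     * <pre>
+     * BitSet b1 = reader.readBitFieldBlock("/bits", 4, 2);           // block number 2
+     * BitSet b2 = reader.readBitFieldBlockWithOffset("/bits", 4, 8); // offset 2 * 4 words
+     * // b1 and b2 cover the same 4 words of the bit field.
+     * </pre>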
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block (in 64 bit words) to read.
+     * @param offset The offset of the block (in 64 bit words) to start reading from.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet readBitFieldBlockWithOffset(String objectPath, int blockSize, long offset);
+
+    /**
+     * Returns <code>true</code> if the <var>bitIndex</var> of the bit field dataset
+     * <var>objectPath</var> is set, <code>false</code> otherwise.
+     * <p>
+     * Will also return <code>false</code> if <var>bitIndex</var> is outside of the bitfield
+     * dataset.
+     */
+    public boolean isBitSet(String objectPath, int bitIndex);
+
+    /**
+     * Reads a bit field array (which can be considered the equivalent to a boolean array of rank 2)
+     * from the data set <var>objectPath</var> and returns it as an array of Java
+     * {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeMatrix(String, long[][])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The {@link BitSet} array read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet[] readBitFieldArray(String objectPath);
+
+    /**
+     * Reads a block of a bit field array (which can be considered the equivalent to a boolean array
+     * of rank 2) from the data set <var>objectPath</var> and returns it as an array of Java
+     * {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeMatrix(String, long[][])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the array block.
+     * @param offset The offset in the array at which to start reading the block.
+     * @return The {@link BitSet} array read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet[] readBitFieldArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads a block of a bit field array (which can be considered the equivalent to a boolean array
+     * of rank 2) from the data set <var>objectPath</var> and returns it as an array of Java
+     * {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeMatrix(String, long[][])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the array block.
+     * @param blockNumber The number of the array block (offset is <code>blockNumber * blockSize</code>).
+     * @return The {@link BitSet} array read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet[] readBitFieldArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5BooleanWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5BooleanWriter.java
new file mode 100644
index 0000000..b350715
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5BooleanWriter.java
@@ -0,0 +1,337 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.BitSet;
+
+/**
+ * An interface that provides methods for writing <code>boolean</code> values to HDF5 files.
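+ * <p>
+ * A minimal write sketch (the <code>HDF5Factory</code> entry point, the file name, the
+ * <code>bool()</code> accessor used to obtain this interface and the <code>aBitSet</code>
+ * variable are assumptions for illustration):
+ * 
+ * <pre>
+ * IHDF5Writer writer = HDF5Factory.open("example.h5");
+ * writer.bool().write("/flag", true);
+ * writer.bool().writeBitField("/bits", aBitSet);
+ * writer.close();
+ * </pre>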
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5BooleanWriter extends IHDF5BooleanReader
+{
+
+    /**
+     * Sets a <code>boolean</code> attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written beforehand by one
+     * of the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, boolean value);
+
+    /**
+     * Writes out a <code>boolean</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     */
+    public void write(String objectPath, boolean value);
+
+    /**
+     * Writes out a bit field (which can be considered the equivalent to a boolean array of rank
+     * 1), provided as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeBitField(String objectPath, BitSet data);
+
+    /**
+     * Writes out a bit field (which can be considered the equivalent to a boolean array of rank
+     * 1), provided as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeBitField(String objectPath, BitSet data, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. This will be the total
+     *            size for non-extendable data sets and the size of one chunk for extendable
+     *            (chunked) data sets. For extendable data sets the initial size of the array will
+     *            be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public void createBitField(String objectPath, int size);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createBitField(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. This will be the total
+     *            size for non-extendable data sets and the size of one chunk for extendable
+     *            (chunked) data sets. For extendable data sets the initial size of the array will
+     *            be 0, see {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public void createBitField(String objectPath, int size, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is
+     *            <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createBitField(String objectPath, long size, int blockSize,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a bit field (of rank 1). The data set needs to have been created by
+     * {@link #createBitField(String, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createBitField(String, long, int, HDF5GenericStorageFeatures)} call that was used to
+     * create the data set.
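+     * <p>
+     * Illustrative sketch (<code>bits</code> is an instance of this interface; the path, sizes
+     * and the <code>bitSetForBlock()</code> helper are invented):
+     * 
+     * <pre>
+     * bits.createBitField("/bits", 64L, 8); // total size 64 words, block size 8 words
+     * for (long block = 0; block &lt; 8; block++)
+     * {
+     *     bits.writeBitFieldBlock("/bits", bitSetForBlock(block), 8, block);
+     * }
+     * </pre>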
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code>
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeBitFieldBlock(String objectPath, BitSet data, int dataSize, long blockNumber);
+
+    /**
+     * Writes out a block of a bit field (of rank 1). The data set needs to have been created by
+     * {@link #createBitField(String, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeBitFieldBlock(String, BitSet, int, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createBitField(String, long, int, HDF5GenericStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code>
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeBitFieldBlockWithOffset(String objectPath, BitSet data, int dataSize,
+            long offset);
+
+    /**
+     * Writes out an array of bit fields (which can be considered the equivalent to a boolean array
+     * of rank 2), provided as a Java array of {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeBitFieldArray(String objectPath, BitSet[] data, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out an array of bit fields (which can be considered the equivalent to a boolean array
+     * of rank 2), provided as a Java array of {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeBitFieldArray(String objectPath, BitSet[] data);
+
+    /**
+     * Creates an array of bit fields (of rank 1) (which can be considered the equivalent to a
+     * boolean array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param bitFieldSize The size of the bit fields in the array (in 64 bit words).
+     * @param arraySize The size of the array of bit fields to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param arrayBlockSize The size of one block (for block-wise IO). Ignored if no extendable
+     *            data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arraySize,
+            long arrayBlockSize, HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an array of bit fields (of rank 1) (which can be considered the equivalent to a
+     * boolean array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param bitFieldSize The size of the bit fields in the array (in 64 bit words).
+     * @param arraySize The size of the array of bit fields to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param arrayBlockSize The size of one block (for block-wise IO). Ignored if no extendable
+     *            data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     */
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arraySize,
+            long arrayBlockSize);
+
+    /**
+     * Creates an empty array of bit fields (of rank 1) (which can be considered the equivalent to a
+     * boolean array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param bitFieldSize The size of the bit fields in the array (in 64 bit words).
+     * @param arraySize The size of the array of bit fields to create. When <i>requesting</i> a chunked data
+     *            set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), the initial size of the
+     *            array will be 0 and the chunk size will be <var>arraySize</var>. When
+     *            <i>allowing</i> a chunked data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is not
+     *            configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>arraySize</var>. When
+     *            <i>enforcing</i> a non-extendable data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals the total size
+     *            and will be <var>arraySize</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arraySize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an empty array of bit fields (of rank 1) (which can be considered the equivalent to a
+     * boolean array of rank 2). This method always creates an extendable data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param bitFieldSize The size of the bit fields in the array (in 64 bit words).
+     * @param arraySize When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>arraySize</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>arraySize</var>.
+     */
+    public void createBitFieldArray(String objectPath, int bitFieldSize, long arraySize);
+
+    /**
+     * Writes out an array of bit fields (which can be considered the equivalent to a boolean array
+     * of rank 2), provided as a Java array of {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
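+     * <p>
+     * Illustrative sketch (<code>bits</code> is an instance of this interface; the path, sizes
+     * and the <code>tenBitSets</code> array are invented):
+     * 
+     * <pre>
+     * // 100 bit fields of 2 words (128 bits) each, array block size 10.
+     * bits.createBitFieldArray("/fields", 2, 100L, 10L);
+     * // Write 10 bit fields as block number 3, i.e. array entries [30, 40).
+     * bits.writeBitFieldArrayBlock("/fields", tenBitSets, 10, 3L);
+     * </pre>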
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param dataSize The number of entries from <var>data</var> to write to the bitfield array
+     *            (must be less than or equal to <code>data.length</code>).
+     * @param blockNumber The block number in the array (offset: multiply with
+     *            <var>dataSize</var>).
+     */
+    public void writeBitFieldArrayBlock(String objectPath, BitSet[] data, int dataSize,
+            long blockNumber);
+
+    /**
+     * Writes out an array of bit fields (which can be considered the equivalent to a boolean array
+     * of rank 2), provided as a Java array of {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockNumber The block number in the array (offset: multiply with
+     *            <code>data.length</code>).
+     */
+    public void writeBitFieldArrayBlock(String objectPath, BitSet[] data, long blockNumber);
+
+    /**
+     * Writes out an array of bit fields (which can be considered the equivalent to a boolean array
+     * of rank 2), provided as a Java array of {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param dataSize The number of entries from <var>data</var> to write to the bitfield array
+     *            (must be less than or equal to <code>data.length</code>).
+     * @param offset The offset in the bitfield array where to start writing the data.
+     */
+    public void writeBitFieldArrayBlockWithOffset(String objectPath, BitSet[] data, int dataSize,
+            long offset);
+
+    /**
+     * Writes out an array of bit fields (which can be considered the equivalent to a boolean array
+     * of rank 2), provided as a Java array of {@link BitSet}s.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param offset The offset in the bitfield array where to start writing the data.
+     */
+    public void writeBitFieldArrayBlockWithOffset(String objectPath, BitSet[] data, long offset);
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ByteReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ByteReader.java
new file mode 100644
index 0000000..528d7e9
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ByteReader.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+
+/**
+ * An interface that provides methods for reading <code>byte</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
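+ * <p>
+ * A minimal block-read sketch (the <code>HDF5Factory</code> entry point, the file name, the path
+ * and the <code>int8()</code> accessor used to obtain this interface are assumptions mirroring
+ * the writer-side accessors):
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+ * // Read the third block of 1024 bytes, i.e. elements [2048, 3072).
+ * byte[] block = reader.int8().readArrayBlock("/dataset", 1024, 2);
+ * reader.close();
+ * </pre>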
+ * <p>
+ * <i>Note:</i> If the values read are unsigned, use the methods in {@link UnsignedIntUtils} to convert
+ * to a larger Java integer type that can hold all values as unsigned.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5ByteReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>byte</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public byte getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a <code>byte[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public byte[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional array <code>byte</code> attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MDByteArray getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads a <code>byte</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public byte[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>byte</code> value from the data set <var>objectPath</var>. This method 
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public byte read(String objectPath);
+
+    /**
+     * Reads a <code>byte</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public byte[] readArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath, 
+    				MDByteArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>byte</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MDByteArray array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>byte</code> array (of rank 1) from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>byte[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public byte[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
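+
+    // Illustrative sketch (not part of the upstream source; assumes "reader" is
+    // an instance of this interface and "/ds" a rank-1 data set of 10 elements):
+    // with blockSize 4, successive block numbers yield lengths 4, 4 and 2.
+    //
+    //   byte[] b0 = reader.readArrayBlock("/ds", 4, 0);  // elements 0..3
+    //   byte[] b1 = reader.readArrayBlock("/ds", 4, 1);  // elements 4..7
+    //   byte[] b2 = reader.readArrayBlock("/ds", 4, 2);  // elements 8..9, length 2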
+
+    /**
+     * Reads a block from a <code>byte</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>byte[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public byte[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads a <code>byte</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public byte[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>byte</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public byte[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>byte</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public byte[][] readMatrixBlockWithOffset(String objectPath, 
+    				int blockSizeX, int blockSizeY, long offsetX, long offsetY) 
+    				throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MDByteArray readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readMDArraySlice(String objectPath, IndexMap boundIndices);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readMDArraySlice(String objectPath, long[] boundIndices);
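+
+    // Illustrative sketch (assumed usage, not upstream code): both bound-index
+    // forms below select the same slice of a 3-dimensional data set, fixing
+    // index 1 to the value 4; the result is 2-dimensional (the free indices):
+    //
+    //   MDByteArray s1 = reader.readMDArraySlice("/md", new IndexMap().mapTo(1, 4));
+    //   MDByteArray s2 = reader.readMDArraySlice("/md", new long[] { -1, 4, -1 });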
+
+    /**
+     * Reads a block from a multi-dimensional <code>byte</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readMDArrayBlock(String objectPath,
+    				int[] blockDimensions, long[] blockNumber);
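+
+    // Illustrative sketch (not upstream code): with blockDimensions {2, 3} and
+    // blockNumber {1, 2}, the block read starts at offset {1*2, 2*3} = {2, 6}:
+    //
+    //   MDByteArray block = reader.readMDArrayBlock("/md",
+    //           new int[] { 2, 3 }, new long[] { 1, 2 });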
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>byte</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDByteArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<byte[]>> getArrayNaturalBlocks(
+    									String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDByteArray>> getMDArrayNaturalBlocks(
+    									String dataSetPath);
+}
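
A minimal usage sketch for the reader interface above (illustrative only, not
part of the upstream commit). It assumes a file "data.h5" with a rank-1 byte
data set "/ds", and that HDF5Factory.openForReading() and IHDF5Reader.int8()
behave as in the JHDF5 14.12 API; treat those entry points as assumptions.

    import ch.systemsx.cisd.hdf5.HDF5DataBlock;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;

    public class ByteReaderSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Reader reader = HDF5Factory.openForReading("data.h5");
            try
            {
                // Read the whole rank-1 data set at once ...
                final byte[] all = reader.int8().readArray("/ds");
                System.out.println("total: " + all.length);
                // ... or iterate over its natural (chunk-sized) blocks, which
                // matches the chunk layout and thus the performance advice in
                // the interface Javadoc.
                for (HDF5DataBlock<byte[]> block : reader.int8().getArrayNaturalBlocks("/ds"))
                {
                    System.out.println("block " + block.getIndex() + ": "
                            + block.getData().length + " elements");
                }
            } finally
            {
                reader.close();
            }
        }
    }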
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ByteWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ByteWriter.java
new file mode 100644
index 0000000..868dde7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ByteWriter.java
@@ -0,0 +1,586 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+
+/**
+ * An interface that provides methods for writing <code>byte</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of 
+ * block access) to arrays. The performance of this block access can vary greatly depending on how 
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should 
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read 
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>   
+ * <i>Note:</i> If you need to convert from and to unsigned values, use the methods of 
+ * {@link UnsignedIntUtils}.
+ * 
+ * @author Bernd Rinn
+ */
+ // Note: As a trick to keep backward compatibility, this interface extends 
+ // IHDF5UnsignedByteWriter instead of IHDF5ByteReader, which it logically should extend.
+ // Once we remove IHDF5UnsignedByteWriter, uncomment the following line, remove
+ // all @Override annotations, and we are fine again.
+//public interface IHDF5ByteWriter extends IHDF5ByteReader
+@SuppressWarnings("deprecation")
+public interface IHDF5ByteWriter extends IHDF5UnsignedByteWriter
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>byte</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setAttr(String objectPath, String name, byte value);
+
+    /**
+     * Set a <code>byte[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setArrayAttr(String objectPath, String name, byte[] value);
+
+    /**
+     * Set a multi-dimensional <code>byte</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMDArrayAttr(String objectPath, String name, MDByteArray value);
+
+    /**
+     * Set a <code>byte[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMatrixAttr(String objectPath, String name, byte[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>byte</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    @Override
+    public void write(String objectPath, byte value);
+
+    /**
+     * Writes out a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    @Override
+    public void writeArray(String objectPath, byte[] data);
+
+    /**
+     * Writes out a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeArray(String objectPath, byte[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    @Override
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>byte</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *          <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>byte</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    @Override
+    public void writeArrayBlock(String objectPath, byte[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>byte</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, byte[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be <code><= data.length</code>)
+     * @param offset The offset in the data set to start writing to.
+     */
+    @Override
+    public void writeArrayBlockWithOffset(String objectPath, byte[] data,
+            int dataSize, long offset);
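+
+    // Illustrative sketch (not upstream code; assumes createArray("/ds", 10L, 4)
+    // was called): writing a 10-element data set in blocks of 4, where the
+    // trailing partial block uses the offset-based variant with an explicit
+    // dataSize:
+    //
+    //   writer.writeArrayBlock("/ds", block4, 0L);               // elements 0..3
+    //   writer.writeArrayBlock("/ds", block4, 1L);               // elements 4..7
+    //   writer.writeArrayBlockWithOffset("/ds", block4, 2, 8L);  // elements 8..9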
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(String objectPath, byte[][] data);
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMatrix(String objectPath, byte[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+    		HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the byte matrix to create.
+     * @param sizeY The size of the y dimension of the byte matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the byte matrix to create.
+     * @param sizeY The size of the y dimension of the byte matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} if the total
+     * size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    @Override
+    public void writeMatrixBlock(String objectPath, byte[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, byte[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, byte[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, byte[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>)
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>)
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, byte[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDByteArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDByteArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>byte</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDByteArray data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>byte</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDByteArray data, long[] boundIndices);
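+
+    // Illustrative sketch (assumed usage, not upstream code): both bound-index
+    // forms below write the same 2-dimensional slice of a 3-dimensional data
+    // set, fixing index 1 to the value 4:
+    //
+    //   writer.writeMDArraySlice("/md", data2d, new IndexMap().mapTo(1, 4));
+    //   writer.writeMDArraySlice("/md", data2d, new long[] { -1, 4, -1 });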
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>byte</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    @Override
+    public void writeMDArrayBlock(String objectPath, MDByteArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>byte</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDByteArray data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>byte</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDByteArray data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>byte</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>byte</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset, long[] boundIndices);
+
+   /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
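
A minimal create-then-write sketch for the writer interface above (illustrative
only, not part of the upstream commit). It follows the performance advice in the
interface Javadoc by making the write block size equal to the chunk size;
HDF5Factory.open() and IHDF5Writer.int8() are assumed from the JHDF5 14.12 API.

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class ByteWriterSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("data.h5");
            try
            {
                // Create an extendable data set of 2^20 elements with a chunk
                // (block) size of 4096 ...
                writer.int8().createArray("/ds", 1L << 20, 4096);
                // ... and fill it chunk-aligned, one block per write access.
                final byte[] block = new byte[4096];
                for (long i = 0; i < (1L << 20) / 4096; ++i)
                {
                    writer.int8().writeArrayBlock("/ds", block, i);
                }
            } finally
            {
                writer.close();
            }
        }
    }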
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundBasicReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundBasicReader.java
new file mode 100644
index 0000000..2b16984
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundBasicReader.java
@@ -0,0 +1,665 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.IHDF5CompoundInformationRetriever.IByteArrayInspector;
+
+/**
+ * An interface with legacy methods for reading compound values from HDF5 files. Do not use in any
+ * new code as it will be removed in a future version of JHDF5.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5CompoundBasicReader
+{
+    // /////////////////////
+    // Information
+    // /////////////////////
+
+    /**
+     * Returns the member information for the committed compound data type <var>compoundClass</var>
+     * (using its "simple name") in the order that the members appear in the compound type. It is a
+     * failure condition if this compound data type does not exist.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundMemberInformation[] getCompoundMemberInformation(
+            final Class<T> compoundClass);
+
+    /**
+     * Returns the member information for the committed compound data type <var>dataTypeName</var>
+     * in the order that the members appear in the compound type. It is a failure condition if this
+     * compound data type does not exist. If the <var>dataTypeName</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the compound data type to get the member information for.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public HDF5CompoundMemberInformation[] getCompoundMemberInformation(final String dataTypeName);
+
+    /**
+     * Returns the compound member information for the data set <var>dataSetPath</var> in the order
+     * that the members appear in the compound type. It is a failure condition if this data set does
+     * not exist or is not of compound type.
+     * <p>
+     * Call <code>Arrays.sort(compoundInformation)</code> to sort the array in alphabetical order of
+     * names.
+     * 
+     * @throws HDF5JavaException If the data set is not of compound type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public HDF5CompoundMemberInformation[] getCompoundDataSetInformation(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the compound member information for the data set <var>dataSetPath</var> in the order
+     * that the members appear in the compound type. The returned array will contain the members in
+     * alphabetical order, if <var>sortAlphabetically</var> is <code>true</code> or else in the
+     * order of definition of the compound type. It is a failure condition if this data set does not
+     * exist or is not of compound type.
+     * <p>
+     * 
+     * @throws HDF5JavaException If the data set is not of type compound.
+     * @deprecated Use {@link #getCompoundDataSetInformation(String)} and
+     *             <code>Arrays.sort(compoundInformation)</code>, if needed.
+     */
+    @Deprecated
+    public HDF5CompoundMemberInformation[] getCompoundDataSetInformation(final String dataSetPath,
+            final boolean sortAlphabetically) throws HDF5JavaException;
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns the compound type <var>name</var> for this HDF5 file.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getCompoundType(final String name, Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns the compound type for this HDF5 file, using the default name chosen by JHDF5 which is
+     * based on the simple name of <var>pojoClass</var>.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getCompoundType(Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns the compound type <var>name</var> for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final String name,
+            final Class<T> pojoClass);
+
+    /**
+     * Returns the compound type for this HDF5 file, inferring the mapping from the Java compound
+     * type to the HDF5 type by reflection and using the default name chosen by JHDF5 which is based
+     * on the simple name of <var>pojoClass</var>.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final Class<T> pojoClass);
+
+    /**
+     * Returns the compound type <var>name</var> for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final String name, final T template,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the compound type <var>name</var> for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final String name, final T template);
+
+    /**
+     * Returns the compound type for this HDF5 file, inferring the mapping from the Java compound
+     * type to the HDF5 type by reflection and using the default name chosen by JHDF5 which is based
+     * on the simple name of <var>T</var>.
+     * 
+     * @param template The compound to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getInferredCompoundType(final T template);
+
+    /**
+     * Returns the compound type for this HDF5 file, inferring the mapping from the Java types of
+     * the members.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public HDF5CompoundType<List<?>> getInferredCompoundType(final String name,
+            List<String> memberNames, List<?> template);
+
+    /**
+     * Returns the compound type for this HDF5 file, inferring the mapping from the Java types of
+     * the members.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public HDF5CompoundType<List<?>> getInferredCompoundType(List<String> memberNames,
+            List<?> template);
+
+    /**
+     * Returns the compound type for this HDF5 file, inferring the mapping from the Java types of
+     * the members.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public HDF5CompoundType<Object[]> getInferredCompoundType(final String name,
+            String[] memberNames, Object[] template);
+
+    /**
+     * Returns the compound type for this HDF5 file, inferring the mapping from the Java types of
+     * the members.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public HDF5CompoundType<Object[]> getInferredCompoundType(String[] memberNames,
+            Object[] template);
+
+    /**
+     * Returns the compound type for the given compound data set in <var>objectPath</var>, mapping
+     * it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getDataSetCompoundType(String objectPath, Class<T> pojoClass);
+
+    /**
+     * Returns the compound type for the given compound attribute in <var>attributeName</var> of
+     * <var>objectPath</var>, mapping it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the object that holds the compound attribute.
+     * @param attributeName The name of the compound attribute to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getAttributeCompoundType(String objectPath,
+            String attributeName, Class<T> pojoClass);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. If the <var>dataTypeName</var> starts with '/', it will be considered a
+     * data type path instead of a data type name.
+     * <p>
+     * <em>Note:</em> This method only works for compound data types 'committed' to the HDF5 file.
+     * For files written with JHDF5 this will always be true; however, files created with other
+     * libraries may not have committed their compound data types.
+     * 
+     * @param dataTypeName The path to a committed data type, if starting with '/', or a name of a
+     *            committed data type otherwise.
+     * @param pojoClass The class to use for the mapping.
+     * @return The compound data type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getNamedCompoundType(String dataTypeName, Class<T> pojoClass);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. This method will use the default name for the compound data type as
+     * chosen by JHDF5 and thus will likely only work on files written with JHDF5. The default name
+     * is based on the simple name of <var>pojoClass</var>.
+     * 
+     * @param pojoClass The class to use for the mapping and to get the name of named data type
+     *            from.
+     * @return The compound data type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> HDF5CompoundType<T> getNamedCompoundType(Class<T> pojoClass);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T readCompound(String objectPath, HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, or
+     *            <code>Object[]</code> to get it in an array; alternatively, use a POJO (plain old
+     *            Java object), in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set or if the
+     *             mapping between the compound type and the POJO is not complete.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> T readCompound(String objectPath, Class<T> pojoClass) throws HDF5JavaException;
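+
+    // Illustrative sketch (not part of this interface): the same compound read three ways;
+    // the path and the Point POJO are hypothetical.
+    //
+    //   Point p = reader.readCompound("/point", Point.class);          // POJO mapping
+    //   HDF5CompoundDataMap m =
+    //       reader.readCompound("/point", HDF5CompoundDataMap.class);  // member name -> value
+    //   Object[] a = reader.readCompound("/point", Object[].class);    // by member position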
+
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into a Java object.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T readCompound(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T[] readCompoundArray(String objectPath, HDF5CompoundType<T> type)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T[] readCompoundArray(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, or
+     *            <code>Object[]</code> to get it in an array; alternatively, use a POJO (plain old
+     *            Java object), in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set or if the
+     *             mapping between the compound type and the POJO is not complete.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> T[] readCompoundArray(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the array returned if the data
+     *            set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T[] readCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the array returned if the data
+     *            set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T[] readCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long blockNumber, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the array returned if the data
+     *            set is long enough).
+     * @param offset The offset of the block to read (starting with 0).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T[] readCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long offset) throws HDF5JavaException;
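+
+    // Illustrative sketch (not part of this interface): the two block-read variants address
+    // the same elements when offset == blockNumber * blockSize; path, type and sizes are
+    // hypothetical.
+    //
+    //   Point[] byNumber = reader.readCompoundArrayBlock("/records", pointType, 100, 2L);
+    //   Point[] byOffset =
+    //       reader.readCompoundArrayBlockWithOffset("/records", pointType, 100, 200L);
+    //   // both read elements 200..299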
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the array returned if the data
+     *            set is long enough).
+     * @param offset The offset of the block to read (starting with 0).
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> T[] readCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long offset, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1 or not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> Iterable<HDF5DataBlock<T[]>> getCompoundArrayNaturalBlocks(String objectPath,
+            HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1 or not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> Iterable<HDF5DataBlock<T[]>> getCompoundArrayNaturalBlocks(String objectPath,
+            HDF5CompoundType<T> type, IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, or
+     *            <code>Object[]</code> to get it in an array; alternatively, use a POJO (plain old
+     *            Java object), in which case the compound members will be mapped to Java fields.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1, not a compound data set, or if
+     *             the mapping between the compound type and the POJO is not complete.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> Iterable<HDF5DataBlock<T[]>> getCompoundArrayNaturalBlocks(String objectPath,
+            Class<T> pojoClass) throws HDF5JavaException;
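+
+    // Illustrative sketch (not part of this interface): iterating the natural (chunk-sized)
+    // blocks of a compound data set; the path and Point POJO are hypothetical, and
+    // HDF5DataBlock is assumed to expose getIndex() and getData() as documented there.
+    //
+    //   for (HDF5DataBlock<Point[]> block :
+    //           reader.getCompoundArrayNaturalBlocks("/records", Point.class))
+    //   {
+    //       long blockIndex = block.getIndex(); // the number of this block
+    //       Point[] blockData = block.getData();
+    //   }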
+
+    /**
+     * Reads a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArray(String objectPath, HDF5CompoundType<T> type)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, or
+     *            <code>Object[]</code> to get it in an array; alternatively, use a POJO (plain old
+     *            Java object), in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set or if the
+     *             mapping between the compound type and the POJO is not complete.
+     * @see CompoundType
+     * @see CompoundElement
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArray(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param blockNumber The number of the block to read along each axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param blockNumber The number of the block to read along each axis.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] blockNumber, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block of a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param offset The offset of the block to read in the data set along each axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, int[] blockDimensions, long[] offset)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block of a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param offset The offset of the block to read in the data set along each axis.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> MDArray<T> readCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, int[] blockDimensions, long[] offset,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getCompoundMDArrayNaturalBlocks(
+            String objectPath, HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getCompoundMDArrayNaturalBlocks(
+            String objectPath, HDF5CompoundType<T> type, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array.
+     * @see HDF5DataBlock
+     * @see CompoundType
+     * @see CompoundElement
+     * @throws HDF5JavaException If the data set is not a compound data set or if the mapping
+     *             between the compound type and the POJO is not complete.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getCompoundMDArrayNaturalBlocks(
+            String objectPath, Class<T> pojoClass) throws HDF5JavaException;
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundBasicWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundBasicWriter.java
new file mode 100644
index 0000000..335cc45
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundBasicWriter.java
@@ -0,0 +1,486 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.hdf5.IHDF5CompoundInformationRetriever.IByteArrayInspector;
+
+/**
+ * An interface with legacy methods for writing compound values to HDF5 files. Do not use in any new
+ * code as it will be removed in a future version of JHDF5.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5CompoundBasicWriter extends IHDF5CompoundBasicReader
+{
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a compound value of <var>type</var> given in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompound(String objectPath, HDF5CompoundType<T> type, T data);
+
+    /**
+     * Writes out a compound value. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or
+     *            <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeCompound(String objectPath, T data);
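+
+    // Illustrative sketch (not part of this interface): writing with an inferred type versus
+    // an explicit one; 'writer' is assumed to be obtained elsewhere (e.g. from HDF5Factory)
+    // and to expose these legacy methods, and the paths and Point POJO are hypothetical.
+    //
+    //   writer.writeCompound("/point", new Point(1.0f, 2)); // type inferred from the value
+    //   HDF5CompoundType<Point> pointType =
+    //       writer.getInferredCompoundType(new Point(1.0f, 2));
+    //   writer.writeCompound("/point2", pointType, new Point(3.0f, 4)); // explicit type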
+
+    /**
+     * Writes out a compound value of <var>type</var> given in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompound(String objectPath, HDF5CompoundType<T> type, T data,
+            IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArray(String objectPath, HDF5CompoundType<T> type, T[] data);
+
+    /**
+     * Writes out an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArray(String objectPath, HDF5CompoundType<T> type, T[] data,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param features The storage features of the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArray(String objectPath, HDF5CompoundType<T> type, T[] data,
+            HDF5GenericStorageFeatures features, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out an array (of rank 1) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or
+     *            <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArray(String objectPath, T[] data);
+
+    /**
+     * Writes out an array (of rank 1) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a {@link HDF5CompoundDataMap},
+     *            {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     * @param features The storage features of the data set.
+     * @see CompoundType
+     * @see CompoundElement
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArray(String objectPath, T[] data,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block <var>blockNumber</var> of an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type, T[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block <var>blockNumber</var> of an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param blockNumber The number of the block to write.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArrayBlock(String objectPath, HDF5CompoundType<T> type, T[] data,
+            long blockNumber, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out a block of an array (of rank 1) of compound values with given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param offset The offset of the block in the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            T[] data, long offset);
+
+    /**
+     * Writes out a block of an array (of rank 1) of compound values with given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param offset The offset of the block in the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            T[] data, long offset, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, int size);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the compound array to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, long size,
+            int blockSize);
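+
+    // Illustrative sketch (not part of this interface): creating an array with a chunk size
+    // of 100 and filling it block-wise; path, type and data arrays are hypothetical.
+    //
+    //   writer.createCompoundArray("/records", pointType, 0L, 100);
+    //   writer.writeCompoundArrayBlock("/records", pointType, firstHundred, 0L);  // 0..99
+    //   writer.writeCompoundArrayBlock("/records", pointType, secondHundred, 1L); // 100..199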
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the compound array to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, long size,
+            int blockSize, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the compound array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundArray(String objectPath, HDF5CompoundType<T> type, long size,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data);
+
+    /**
+     * Writes out an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param features The storage features of the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, HDF5GenericStorageFeatures features,
+            IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out an array (of rank N) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or
+     *            <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArray(String objectPath, MDArray<T> data);
+
+    /**
+     * Writes out an array (of rank N) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or
+     *            <code>Object[]</code>.
+     * @param features The storage features of the data set.
+     * @see CompoundType
+     * @see CompoundElement
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArray(String objectPath, MDArray<T> data,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, long[] blockNumber);
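+
+    // Illustrative sketch (not part of this interface): for a rank-N block write, the element
+    // offset on each axis i is blockNumber[i] times the block extent on that axis; the path,
+    // type and the 10x10 block are hypothetical.
+    //
+    //   MDArray<Point> block = ...; // a 10x10 block of compound values
+    //   writer.writeCompoundMDArrayBlock("/grid", pointType, block, new long[] { 2, 3 });
+    //   // targets elements [20..29] x [30..39]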
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, long[] blockNumber, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param offset The offset of the block to write on each axis.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, long[] offset);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param offset The offset of the block to write on each axis.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, long[] offset,
+            IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
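+
+    // Illustrative sketch (not part of this interface): writing only a sub-block of the
+    // in-memory array; the element at memoryOffset within 'data' lands at 'offset' in the
+    // data set (all names and sizes hypothetical).
+    //
+    //   writer.writeCompoundMDArrayBlockWithOffset("/grid", pointType, data,
+    //       new int[] { 5, 5 },    // dimensions of the block to write
+    //       new long[] { 10, 10 }, // where the block starts in the data set
+    //       new int[] { 2, 2 });   // where the block starts within 'data'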
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void writeCompoundMDArrayBlockWithOffset(String objectPath,
+            HDF5CompoundType<T> type, MDArray<T> data, int[] blockDimensions, long[] offset,
+            int[] memoryOffset, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The dimensions of the compound array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (along
+     *            each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array (along each axis) will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            int[] dimensions);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The extent of the compound array along each axis.
+     * @param blockDimensions The extent of one block along each axis (for block-wise IO).
+     *            Ignored if no extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>deflate == false</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            long[] dimensions, int[] blockDimensions);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The extent of the compound array along each axis.
+     * @param blockDimensions The extent of one block along each axis (for block-wise IO).
+     *            Ignored if no extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>deflate == false</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            long[] dimensions, int[] blockDimensions, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The dimensions of the compound array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (along
+     *            each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array (along each axis) will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#compounds()} instead.
+     */
+    @Deprecated
+    public <T> void createCompoundMDArray(String objectPath, HDF5CompoundType<T> type,
+            int[] dimensions, HDF5GenericStorageFeatures features);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundInformationRetriever.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundInformationRetriever.java
new file mode 100644
index 0000000..bc33698
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundInformationRetriever.java
@@ -0,0 +1,750 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+
+/**
+ * An interface to get information on HDF5 compound data sets and compound data types, and to create
+ * compound types from mappings to Java classes.
+ * <p>
+ * <h2>What is an {@link HDF5CompoundType}?</h2>
+ * <p>
+ * A {@link HDF5CompoundType} is a Java object representing both an HDF5 compound type in a
+ * particular HDF5 file and the mapping of this HDF5 compound type to a representation in Java. A
+ * Java representation can be either a plain-old Java object (POJO) where Java fields correspond to
+ * HDF5 compound members, a map (see {@link HDF5CompoundDataMap}) where each HDF5 compound member is
+ * represented by one key-value pair, a list (see {@link HDF5CompoundDataList}) or
+ * <code>Object[]</code>, where the members of the HDF5 compound type are stored by their position
+ * (or order) in the HDF5 compound type.
+ * <p>
+ * It is important to understand that creating the HDF5 compound type in memory (what members of
+ * what types it contains, in what order) and mapping the members to Java (including the Java type
+ * and, for POJOs, the field) are two distinct steps. Different methods of this interface take two
+ * different approaches to creating the HDF5 compound type: <code>getType()</code> and
+ * <code>getInferredType()</code> create it anew, based on the POJO class and the
+ * <code>HDF5CompoundMemberMapping</code>s provided, while <code>getNamedType()</code>,
+ * <code>getDataSetType()</code> and <code>getAttributeType()</code> read it from the HDF5 file.
+ * Whenever you read a compound from an HDF5 file, the second approach should be preferred, as the
+ * HDF5 file is the authoritative source of information on HDF5 types.
+ * <p>
+ * The following Java types can be mapped to compound members:
+ * <ul>
+ * <li>Primitive values</li>
+ * <li>Primitive arrays</li>
+ * <li>Primitive matrices (except <code>char[][]</code>)</li>
+ * <li>{@link String} (fixed-length and variable-length)</li>
+ * <li>{@link java.util.BitSet}</li>
+ * <li>{@link java.util.Date}</li>
+ * <li>{@link HDF5EnumerationValue}</li>
+ * <li>{@link HDF5EnumerationValueArray}</li>
+ * <li>Sub-classes of {@link MDAbstractArray}</li>
+ * <li>References to data sets</li>
+ * </ul>
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5CompoundInformationRetriever
+{
+
+    /**
+     * An interface for inspecting the byte array of compounds and compound arrays just after they
+     * are read from or before they are written to the HDF5 file.
+     */
+    public interface IByteArrayInspector
+    {
+        /**
+         * Called with the byte array. The method may change the <var>byteArray</var>, but does so
+         * at its own risk!
+         */
+        void inspect(byte[] byteArray);
+    }
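+
+    // Illustrative sketch (not part of this interface): a trivial inspector that only logs
+    // the size of each byte array passing through.
+    //
+    //   IByteArrayInspector loggingInspector = new IByteArrayInspector()
+    //       {
+    //           @Override
+    //           public void inspect(byte[] byteArray)
+    //           {
+    //               System.out.println("compound record: " + byteArray.length + " bytes");
+    //           }
+    //       };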
+
+    // /////////////////////
+    // Information
+    // /////////////////////
+
+    /**
+     * Returns the member information for the committed compound data type <var>compoundClass</var>
+     * (using its "simple name") in the order that the members appear in the compound type. It is a
+     * failure condition if this compound data type does not exist.
+     */
+    public <T> HDF5CompoundMemberInformation[] getMemberInfo(Class<T> compoundClass);
+
+    /**
+     * Returns the member information for the committed compound data type <var>dataTypeName</var>
+     * in the order that the members appear in the compound type. It is a failure condition if this
+     * compound data type does not exist. If the <var>dataTypeName</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the compound data type to get the member information for.
+     */
+    public HDF5CompoundMemberInformation[] getMemberInfo(String dataTypeName);
+
+    /**
+     * Returns the member information for the committed compound data type <var>dataTypeName</var>
+     * in the order that the members appear in the compound type. It is a failure condition if this
+     * compound data type does not exist. If the <var>dataTypeName</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the compound data type to get the member information for.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     */
+    public HDF5CompoundMemberInformation[] getMemberInfo(String dataTypeName,
+            DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the compound member information for the data set <var>dataSetPath</var> in the order
+     * that the members appear in the compound type. It is a failure condition if this data set does
+     * not exist or is not of compound type.
+     * <p>
+     * Call <code>Arrays.sort(compoundInformation)</code> to sort the array in alphabetical order of
+     * names.
+     * 
+     * @throws HDF5JavaException If the data set is not of type compound.
+     */
+    public HDF5CompoundMemberInformation[] getDataSetInfo(String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the compound member information for the data set <var>dataSetPath</var> in the order
+     * that the members appear in the compound type. It is a failure condition if this data set does
+     * not exist or is not of compound type.
+     * <p>
+     * Call <code>Arrays.sort(compoundInformation)</code> to sort the array in alphabetical order of
+     * names.
+     * 
+     * @param dataSetPath The name of the data set to get the member information for.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @throws HDF5JavaException If the data set is not of type compound.
+     */
+    public HDF5CompoundMemberInformation[] getDataSetInfo(String dataSetPath,
+            DataTypeInfoOptions dataTypeInfoOptions) throws HDF5JavaException;
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>pojoClass</var>. The mapping is defined by <var>members</var>.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     */
+    public <T> HDF5CompoundType<T> getType(String name, Class<T> pojoClass,
+            boolean requireTypesToBeEqual, HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns the compound type <var>name</var> for this HDF5 file.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     */
+    public <T> HDF5CompoundType<T> getType(String name, Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>pojoClass</var>. The name
+     * of the compound data type is chosen to be the simple name of <var>pojoClass</var>. The
+     * mapping is defined by <var>members</var>.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     */
+    public <T> HDF5CompoundType<T> getType(Class<T> pojoClass, HDF5CompoundMemberMapping... members);
+
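+    // Illustrative usage sketch, not part of the interface: the POJO class 'Point' and the
+    // variable 'reader' (an IHDF5Reader opened elsewhere) are invented for this example.
+    // An explicitly mapped compound type for a POJO with fields 'x' and 'y':
+    //
+    //   HDF5CompoundType<Point> type = reader.compound().getType("Point", Point.class,
+    //           HDF5CompoundMemberMapping.mapping("x"),
+    //           HDF5CompoundMemberMapping.mapping("y"));
+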
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>pojoClass</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>pojoClass</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>pojoClass</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>pojoClass</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>pojoClass</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>pojoClass</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound in the HDF5 file.
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, Class<T> pojoClass);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>pojoClass</var>. The mapping
+     * of the Java compound type to the HDF5 type is inferred by reflection from
+     * <var>pojoClass</var> following basic rules on how Java data types are mapped to HDF5 data
+     * types. The simple name of <var>pojoClass</var> is used as the name of the HDF5 compound type.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(Class<T> pojoClass);
+
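+    // Illustrative sketch (same invented 'Point' POJO): with the inferred variant no member
+    // mappings are spelled out; the fields of the class are mapped by reflection:
+    //
+    //   HDF5CompoundType<Point> type = reader.compound().getInferredType(Point.class);
+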
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>pojoClass</var>. The mapping
+     * of the Java compound type to the HDF5 type is inferred by reflection from
+     * <var>pojoClass</var> following basic rules on how Java data types are mapped to HDF5 data
+     * types. The simple name of <var>pojoClass</var> is used as the name of the HDF5 compound type.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(Class<T> pojoClass,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>template</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, T template,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>template</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, T template,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>template</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, T template);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>template</var>. The mapping
+     * of the Java compound type to the HDF5 type is inferred by reflection from <var>template</var>
+     * following basic rules on how Java data types are mapped to HDF5 data types. The simple name
+     * of the class of <var>template</var> is used as the name of the HDF5 compound type.
+     * 
+     * @param template The compound to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(T template);
+
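+    // Illustrative sketch (invented instance 'point' of the 'Point' POJO): inferring from a
+    // template object instead of a class picks up the runtime type of the instance:
+    //
+    //   HDF5CompoundType<Point> type = reader.compound().getInferredType(point);
+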
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>template</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound array to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(final String name, final T[] template);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>template</var>. The mapping
+     * of the Java compound type to the HDF5 type is inferred by reflection from <var>template</var>
+     * following basic rules on how Java data types are mapped to HDF5 data types. The simple name
+     * of the class of <var>template</var> is used as the name of the HDF5 compound type.
+     * 
+     * @param template The compound array to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(final T[] template);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>template</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound array to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, T[] template,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var>. The mapping of the Java compound type to the HDF5 type is inferred by
+     * reflection from <var>template</var> following basic rules on how Java data types are mapped
+     * to HDF5 data types.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param template The compound array to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredType(String name, T[] template,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var> and <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredType(String name, List<String> memberNames,
+            List<?> template);
+
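+    // Illustrative sketch (member names and values invented): in the list-based variant,
+    // each template element determines the HDF5 member type at the same position:
+    //
+    //   HDF5CompoundType<List<?>> type = reader.compound().getInferredType("Person",
+    //           Arrays.asList("name", "age"), Arrays.<Object> asList("some name", 17));
+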
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var> and <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredType(String name, List<String> memberNames,
+            List<?> template, HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var> and <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredType(String name, List<String> memberNames,
+            List<?> template, HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>template</var> and
+     * <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredType(List<String> memberNames, List<?> template);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>template</var> and
+     * <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredType(List<String> memberNames, List<?> template,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var> and <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredType(String name, String[] memberNames,
+            Object[] template);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var> and <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredType(String name, String[] memberNames,
+            Object[] template, HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns a compound type <var>name</var> for this HDF5 file, compatible with
+     * <var>template</var> and <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param name The name of the compound type in the HDF5 file.
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredType(String name, String[] memberNames,
+            Object[] template, HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>template</var> and
+     * <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredType(String[] memberNames, Object[] template);
+
+    /**
+     * Returns a compound type for this HDF5 file, compatible with <var>template</var> and
+     * <var>memberNames</var>. The mapping of the Java compound type to the
+     * HDF5 type is inferred by reflection of the elements of <var>template</var> following basic
+     * rules on how Java data types are mapped to HDF5 data types. Each element of
+     * <var>template</var> is considered a member of the compound. The names are taken from
+     * <var>memberNames</var> in the same order as in <var>template</var>.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredType(String[] memberNames, Object[] template,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the compound type for the given compound data set in <var>objectPath</var>, mapping
+     * it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            boolean requireTypesToBeEqual, HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns the compound type for the given compound data set in <var>objectPath</var>, mapping
+     * it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns the compound type for the given compound data set in <var>objectPath</var>, mapping
+     * it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the compound type for the given compound data set in <var>objectPath</var>, mapping
+     * it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns the compound type for the given compound data set in <var>objectPath</var>, mapping
+     * it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset to get the type from.
+     * @param pojoClass The class to use for the mapping.
+     */
+    public <T> HDF5CompoundType<T> getDataSetType(String objectPath, Class<T> pojoClass);
+
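+    // Illustrative sketch (data set path invented): deriving the type from an existing
+    // compound data set avoids spelling out the member layout by hand:
+    //
+    //   HDF5CompoundType<Point> type =
+    //           reader.compound().getDataSetType("/group/points", Point.class);
+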
+    /**
+     * Returns the compound type for the given compound attribute in <var>attributeName</var> of
+     * <var>objectPath</var>, mapping it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset.
+     * @param attributeName The name of the attribute to get the type for.
+     * @param pojoClass The class to use for the mapping.
+     */
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass);
+
+    /**
+     * Returns the compound type for the given compound attribute in <var>attributeName</var> of
+     * <var>objectPath</var>, mapping it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset.
+     * @param attributeName The name of the attribute to get the type for.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     */
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass, HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the compound type for the given compound attribute in <var>attributeName</var> of
+     * <var>objectPath</var>, mapping it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset.
+     * @param attributeName The name of the attribute to get the type for.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     */
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass, HDF5CompoundMappingHints hints,
+            DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the compound type for the given compound attribute in <var>attributeName</var> of
+     * <var>objectPath</var>, mapping it to <var>pojoClass</var>.
+     * 
+     * @param objectPath The path of the compound dataset.
+     * @param attributeName The name of the attribute to get the type for.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     */
+    public <T> HDF5CompoundType<T> getAttributeType(String objectPath, String attributeName,
+            Class<T> pojoClass, HDF5CompoundMappingHints hints,
+            DataTypeInfoOptions dataTypeInfoOptions, boolean requireTypesToBeEqual);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. If the <var>dataTypeName</var> starts with '/', it will be considered a
+     * data type path instead of a data type name.
+     * <p>
+     * <em>Note:</em> This method only works for compound data types 'committed' to the HDF5 file.
+     * For files written with JHDF5 this will always be true; however, files created with other
+     * libraries may not choose to commit compound data types.
+     * 
+     * @param dataTypeName The path to a committed data type, if starting with '/', or a name of a
+     *            committed data type otherwise.
+     * @param pojoClass The class to use for the mapping.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass);
+
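+    // Illustrative sketch (type name invented): resolving a committed compound type by
+    // name; an argument starting with '/' would be treated as a data type path instead:
+    //
+    //   HDF5CompoundType<Point> type = reader.compound().getNamedType("Point", Point.class);
+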
+    /**
+     * Returns the named compound type from the file, mapping it to <var>pojoClass</var>. This
+     * method uses the default name for the compound data type as chosen by JHDF5 and thus will
+     * likely only work on files written with JHDF5. The default name is based on the simple name
+     * of <var>pojoClass</var>.
+     * 
+     * @param pojoClass The class to use for the mapping and to get the name of named data type
+     *            from.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getNamedType(Class<T> pojoClass);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. If the <var>dataTypeName</var> starts with '/', it will be considered a
+     * data type path instead of a data type name.
+     * <p>
+     * <em>Note:</em> This method only works for compound data types 'committed' to the HDF5 file.
+     * For files written with JHDF5 this will always be true; however, files created with other
+     * libraries may not choose to commit compound data types.
+     * 
+     * @param dataTypeName The path to a committed data type, if starting with '/', or a name of a
+     *            committed data type otherwise.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. If the <var>dataTypeName</var> starts with '/', it will be considered a
+     * data type path instead of a data type name.
+     * <p>
+     * <em>Note:</em> This method only works for compound data types 'committed' to the HDF5 file.
+     * For files written with JHDF5 this will always be true; however, files created with other
+     * libraries may not choose to commit compound data types.
+     * 
+     * @param dataTypeName The path to a committed data type, if starting with '/', or a name of a
+     *            committed data type otherwise.
+     * @param pojoClass The class to use for the mapping.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. If the <var>dataTypeName</var> starts with '/', it will be considered a
+     * data type path instead of a data type name.
+     * <p>
+     * <em>Note:</em> This method only works for compound data types 'committed' to the HDF5 file.
+     * For files written with JHDF5 this will always be true; however, files created with other
+     * libraries may not choose to commit compound data types.
+     * 
+     * @param dataTypeName The path to a committed data type, if starting with '/', or a name of a
+     *            committed data type otherwise.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the named compound type with name <var>dataTypeName</var> from file, mapping it to
+     * <var>pojoClass</var>. If the <var>dataTypeName</var> starts with '/', it will be considered a
+     * data type path instead of a data type name.
+     * <p>
+     * <em>Note:</em> This method only works for compound data types 'committed' to the HDF5 file.
+     * For files written with JHDF5 this will always be true; however, files created with other
+     * libraries may not choose to commit compound data types.
+     * 
+     * @param dataTypeName The path to a committed data type, if starting with '/', or a name of a
+     *            committed data type otherwise.
+     * @param pojoClass The class to use for the mapping.
+     * @param hints The hints to provide to the mapping procedure.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @param requireTypesToBeEqual If <code>true</code>, this type is required to be equal to the
+     *            type it tries to read, or else an {@link HDF5JavaException} will be thrown.
+     * @return The compound data type.
+     */
+    public <T> HDF5CompoundType<T> getNamedType(String dataTypeName, Class<T> pojoClass,
+            HDF5CompoundMappingHints hints, DataTypeInfoOptions dataTypeInfoOptions,
+            boolean requireTypesToBeEqual);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundReader.java
new file mode 100644
index 0000000..d5c7b7c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundReader.java
@@ -0,0 +1,438 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+
+/**
+ * An interface that provides methods for reading compound values from HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5CompoundReader extends IHDF5CompoundInformationRetriever
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a compound attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the attribute.
+     * @throws HDF5JavaException If the <var>attributeName</var> is not a compound attribute.
+     */
+    public <T> T getAttr(String objectPath, String attributeName, HDF5CompoundType<T> type)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the attribute.
+     * @throws HDF5JavaException If the <var>attributeName</var> is not a compound attribute.
+     */
+    public <T> T getAttr(String objectPath, String attributeName, Class<T> pojoClass)
+            throws HDF5JavaException;
+
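+    // Illustrative sketch (object path and attribute name invented): a compound attribute
+    // can be read into a POJO, or into a map keyed by member name via HDF5CompoundDataMap:
+    //
+    //   Point p = reader.compound().getAttr("/group/ds", "origin", Point.class);
+    //   HDF5CompoundDataMap m =
+    //           reader.compound().getAttr("/group/ds", "origin", HDF5CompoundDataMap.class);
+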
+    /**
+     * Reads a compound array (of rank 1) attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the attribute.
+     * @throws HDF5JavaException If the <var>attributeName</var> is not a compound attribute.
+     */
+    public <T> T[] getArrayAttr(String objectPath, String attributeName, HDF5CompoundType<T> type)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the attribute.
+     * @throws HDF5JavaException If the <var>attributeName</var> is not a compound attribute.
+     */
+    public <T> T[] getArrayAttr(String objectPath, String attributeName, Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank N) attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the attribute.
+     * @throws HDF5JavaException If the <var>attributeName</var> is not a compound attribute.
+     */
+    public <T> MDArray<T> getMDArrayAttr(String objectPath, String attributeName,
+            HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank N) attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the attribute.
+     * @throws HDF5JavaException If the <var>attributeName</var> is not a compound attribute.
+     */
+    public <T> MDArray<T> getMDArrayAttr(String objectPath, String attributeName, Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T read(String objectPath, HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set or if the
+     *             mapping between the compound type and the POJO is not complete.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> T read(String objectPath, Class<T> pojoClass) throws HDF5JavaException;
+
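+    // Illustrative sketch (data set path invented): reading a single compound value; the
+    // pojoClass variant infers the mapping, the typed variant reuses a prepared type:
+    //
+    //   Point p = reader.compound().read("/group/point", Point.class);
+    //   Point q = reader.compound().read("/group/point", type); // 'type' obtained earlier
+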
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into a Java object.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T read(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T[] readArray(String objectPath, HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T[] readArray(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set or if the
+     *             mapping between the compound type and the POJO is not complete.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> T[] readArray(String objectPath, Class<T> pojoClass) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the <code>T[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply by
+     *            <var>blockSize</var>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T[] readArrayBlock(String objectPath, HDF5CompoundType<T> type, int blockSize,
+            long blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the <code>T[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply by
+     *            <var>blockSize</var>).
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T[] readArrayBlock(String objectPath, HDF5CompoundType<T> type, int blockSize,
+            long blockNumber, IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the <code>T[]</code> returned
+     *            if the data set is long enough).
+     * @param offset The offset of the block to read (starting with 0).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T[] readArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long offset) throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockSize The block size (this will be the length of the <code>T[]</code> returned
+     *            if the data set is long enough).
+     * @param offset The offset of the block to read (starting with 0).
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> T[] readArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int blockSize, long offset, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1 or not a compound data set.
+     */
+    public <T> Iterable<HDF5DataBlock<T[]>> getArrayBlocks(String objectPath,
+            HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1 or not a compound data set.
+     */
+    public <T> Iterable<HDF5DataBlock<T[]>> getArrayBlocks(String objectPath,
+            HDF5CompoundType<T> type, IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1, not a compound data set, or if
+     *             the mapping between the compound type and the POJO is not complete.
+     */
+    public <T> Iterable<HDF5DataBlock<T[]>> getArrayBlocks(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException;
+
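+    // Illustrative sketch (data set path invented): natural blocks follow the storage
+    // (chunk) layout of the data set, so each iteration reads one block:
+    //
+    //   for (HDF5DataBlock<Point[]> block :
+    //           reader.compound().getArrayBlocks("/group/points", Point.class))
+    //   {
+    //       long offset = block.getOffset(); // offset of this block in the data set
+    //       Point[] data = block.getData();
+    //   }
+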
+    /**
+     * Reads a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> MDArray<T> readMDArray(String objectPath, HDF5CompoundType<T> type)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set or if the
+     *             mapping between the compound type and the POJO is not complete.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> MDArray<T> readMDArray(String objectPath, Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> MDArray<T> readMDArray(String objectPath, HDF5CompoundType<T> type,
+            IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Reads a block from a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param blockNumber The number of the block to read along each axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> MDArray<T> readMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block from a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param blockNumber The number of the block to read along each axis.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> MDArray<T> readMDArrayBlock(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] blockNumber, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block from a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param offset The offset of the block to read in the data set along each axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> MDArray<T> readMDArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] offset) throws HDF5JavaException;
+
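+    // Illustrative sketch (path and geometry invented): reading a 2x3 block whose corner
+    // sits at offset (10, 20) of a two-dimensional compound data set:
+    //
+    //   MDArray<Point> block = reader.compound().readMDArrayBlockWithOffset(
+    //           "/group/grid", type, new int[] { 2, 3 }, new long[] { 10, 20 });
+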
+    /**
+     * Reads a block from a compound array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param offset The offset of the block to read in the data set along each axis.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     */
+    public <T> MDArray<T> readMDArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            int[] blockDimensions, long[] offset, IByteArrayInspector inspectorOrNull)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @see HDF5MDDataBlock
+     */
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getMDArrayBlocks(String objectPath,
+            HDF5CompoundType<T> type) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param inspectorOrNull The inspector to be called before the byte array read from the HDF5
+     *            file is translated back into Java objects.
+     * @see HDF5MDDataBlock
+     */
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getMDArrayBlocks(String objectPath,
+            HDF5CompoundType<T> type, IByteArrayInspector inspectorOrNull) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set of compounds to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array.
+     * @see HDF5DataBlock
+     * @see CompoundType
+     * @see CompoundElement
+     * @throws HDF5JavaException If the data set is not a compound data set or if the mapping
+     *             between the compound type and the POJO is not complete.
+     */
+    public <T> Iterable<HDF5MDDataBlock<MDArray<T>>> getMDArrayBlocks(String objectPath,
+            Class<T> pojoClass) throws HDF5JavaException;
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundWriter.java
new file mode 100644
index 0000000..d8fa923
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5CompoundWriter.java
@@ -0,0 +1,644 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.List;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+
+/**
+ * An interface that provides methods for writing compound values to HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5CompoundWriter extends IHDF5CompoundReader
+{
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns the compound type cloned from the given <var>templateType</var>. This method can be
+     * used to get a compound type from a different file.
+     * 
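+     * <p>
+     * For illustration, a sketch of copying a compound value between two open files; the
+     * <code>reader</code> / <code>writer</code> handles, the <code>Point</code> POJO and the
+     * paths are hypothetical, and <code>read()</code> is assumed from the compound reader
+     * interface:
+     * 
+     * <pre>
+     * HDF5CompoundType<Point> sourceType = reader.compound().getInferredType(Point.class);
+     * HDF5CompoundType<Point> clonedType = writer.compound().getClonedType(sourceType);
+     * writer.compound().write("/point", clonedType, reader.compound().read("/point", sourceType));
+     * </pre>
+     * 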
+     * @param templateType The compound type to clone. Will typically be a compound type from
+     *            another reader or writer. The type needs to be <i>open</i> (which means that
+     *            the reader / writer from which it has been obtained must still be open).
+     */
+    public <T> HDF5CompoundType<T> getClonedType(final HDF5CompoundType<T> templateType);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, using the default name chosen by
+     * JHDF5, which is based on the simple name of <var>pojoClass</var>.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param members The mapping from the Java compound type to the HDF5 type.
+     */
+    public <T> HDF5CompoundType<T> getAnonType(Class<T> pojoClass,
+            HDF5CompoundMemberMapping... members);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredAnonType(Class<T> pojoClass,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
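+     * <p>
+     * For illustration, a minimal sketch; the <code>writer</code> handle, the
+     * <code>Measurement</code> POJO, the <code>measurement</code> instance and the path are
+     * hypothetical:
+     * 
+     * <pre>
+     * HDF5CompoundType<Measurement> type = writer.compound().getInferredAnonType(Measurement.class);
+     * writer.compound().write("/measurement", type, measurement);
+     * </pre>
+     * 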
+     * @param pojoClass The plain old Java type that corresponds to this HDF5 type.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredAnonType(Class<T> pojoClass);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param template The compound to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredAnonType(T template, HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the Java
+     * compound type to the HDF5 type by reflection and using the default name chosen by JHDF5,
+     * which
+     * is based on the simple name of <var>T</var>.
+     * 
+     * @param template The compound to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredAnonType(T template);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param template The compound array to infer the HDF5 compound type from.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredAnonType(final T[] template);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the
+     * Java compound type to the HDF5 type by reflection.
+     * 
+     * @param template The compound array to infer the HDF5 compound type from.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public <T> HDF5CompoundType<T> getInferredAnonType(T[] template, HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the Java
+     * types of the members.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredAnonType(List<String> memberNames,
+            List<?> template, HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the Java
+     * types of the members.
+     * 
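+     * <p>
+     * For illustration, a minimal sketch; the <code>writer</code> handle and the path are
+     * hypothetical:
+     * 
+     * <pre>
+     * HDF5CompoundType<List<?>> type = writer.compound().getInferredAnonType(
+     *         Arrays.asList("x", "y"), Arrays.<Object> asList(1.5f, 2.5f));
+     * writer.compound().write("/point", type, Arrays.<Object> asList(1.5f, 2.5f));
+     * </pre>
+     * 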
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<List<?>> getInferredAnonType(List<String> memberNames, List<?> template);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the Java
+     * types of the members.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @param hints The hints to provide to the mapping procedure.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredAnonType(String[] memberNames, Object[] template,
+            HDF5CompoundMappingHints hints);
+
+    /**
+     * Returns the anonymous compound type for this HDF5 file, inferring the mapping from the Java
+     * types of the members.
+     * 
+     * @param memberNames The names of the members.
+     * @param template The compound to infer the HDF5 compound type from. Needs to have the same
+     *            length as <var>memberNames</var>.
+     * @see HDF5CompoundMemberMapping#inferMapping
+     */
+    public HDF5CompoundType<Object[]> getInferredAnonType(String[] memberNames, Object[] template);
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets a compound attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
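+     * <p>
+     * For illustration, a minimal sketch using a map-based compound value; the
+     * <code>writer</code> handle, the path and the attribute name are hypothetical, and it is
+     * assumed that the type can be inferred from a {@link HDF5CompoundDataMap} template:
+     * 
+     * <pre>
+     * HDF5CompoundDataMap map = new HDF5CompoundDataMap();
+     * map.put("author", "jdoe");
+     * map.put("version", 3);
+     * writer.compound().setAttr("/dataSet", "provenance",
+     *         writer.compound().getInferredAnonType(map), map);
+     * </pre>
+     * 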
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param type The type definition of this compound type.
+     * @param value The value of the attribute. May be a Data Transfer Object, a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     */
+    public <T> void setAttr(String objectPath, String attributeName, HDF5CompoundType<T> type,
+            T value);
+
+    /**
+     * Sets a compound attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param value The value of the attribute. May be a Data Transfer Object, a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     */
+    public <T> void setAttr(String objectPath, String attributeName, T value);
+
+    /**
+     * Sets a compound attribute array (of rank 1) on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param type The type definition of this compound type.
+     * @param value The value of the attribute. May be a Data Transfer Object, a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     */
+    public <T> void setArrayAttr(String objectPath, String attributeName, HDF5CompoundType<T> type,
+            T[] value);
+
+    /**
+     * Sets a compound attribute array (of rank 1) on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param value The value of the attribute. May be a Data Transfer Object, a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     */
+    public <T> void setArrayAttr(String objectPath, String attributeName, T[] value);
+
+    /**
+     * Sets a compound attribute array (of rank N) on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param type The type definition of this compound type.
+     * @param value The value of the attribute. May be a Data Transfer Object, a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     */
+    public <T> void setMDArrayAttr(String objectPath, String attributeName,
+            HDF5CompoundType<T> type, MDArray<T> value);
+
+    /**
+     * Sets a compound attribute array (of rank N) on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param value The value of the attribute. May be a Data Transfer Object, a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     */
+    public <T> void setMDArrayAttr(String objectPath, String attributeName, MDArray<T> value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a compound value of <var>type</var> given in <var>data</var>.
+     * 
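+     * <p>
+     * For illustration, a minimal end-to-end sketch; the <code>Point</code> POJO, the file name
+     * and the path are hypothetical:
+     * 
+     * <pre>
+     * IHDF5Writer writer = HDF5Factory.open("example.h5");
+     * HDF5CompoundType<Point> type = writer.compound().getInferredAnonType(Point.class);
+     * writer.compound().write("/point", type, new Point(1.0f, 2.0f));
+     * writer.close();
+     * </pre>
+     * 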
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     */
+    public <T> void write(String objectPath, HDF5CompoundType<T> type, T data);
+
+    /**
+     * Writes out a compound value. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void write(String objectPath, T data);
+
+    /**
+     * Writes out a compound value of <var>type</var> given in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void write(String objectPath, HDF5CompoundType<T> type, T data,
+            IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     */
+    public <T> void writeArray(String objectPath, HDF5CompoundType<T> type, T[] data);
+
+    /**
+     * Writes out an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param features The storage features of the data set.
+     */
+    public <T> void writeArray(String objectPath, HDF5CompoundType<T> type, T[] data,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param features The storage features of the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeArray(String objectPath, HDF5CompoundType<T> type, T[] data,
+            HDF5GenericStorageFeatures features, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out an array (of rank 1) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeArray(String objectPath, T[] data);
+
+    /**
+     * Writes out an array (of rank 1) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a {@link HDF5CompoundDataMap},
+     *            {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     * @param features The storage features of the data set.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeArray(String objectPath, T[] data, HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block <var>blockNumber</var> of an array (of rank 1) of compound values.
+     * 
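+     * <p>
+     * For illustration, a sketch of block-wise writing; <code>writer</code>, <code>type</code>,
+     * <code>nBlocks</code>, <code>nextBlockOfLength1000()</code> and the path are hypothetical.
+     * The data set is created with a block size equal to the length of the written arrays:
+     * 
+     * <pre>
+     * writer.compound().createArray("/points", type, 0L, 1000);
+     * for (int i = 0; i < nBlocks; ++i)
+     * {
+     *     writer.compound().writeArrayBlock("/points", type, nextBlockOfLength1000(), i);
+     * }
+     * </pre>
+     * 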
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param blockNumber The number of the block to write.
+     */
+    public <T> void writeArrayBlock(String objectPath, HDF5CompoundType<T> type, T[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block <var>blockNumber</var> of an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param blockNumber The number of the block to write.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeArrayBlock(String objectPath, HDF5CompoundType<T> type, T[] data,
+            long blockNumber, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out a block of an array (of rank 1) of compound values with given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param offset The offset of the block in the data set.
+     */
+    public <T> void writeArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            T[] data, long offset);
+
+    /**
+     * Writes out a block of an array (of rank 1) of compound values with given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The value of the data set.
+     * @param offset The offset of the block in the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            T[] data, long offset, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public <T> void createArray(String objectPath, HDF5CompoundType<T> type, int size);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the compound array to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     */
+    public <T> void createArray(String objectPath, HDF5CompoundType<T> type, long size,
+            int blockSize);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the compound array to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @param features The storage features of the data set.
+     */
+    public <T> void createArray(String objectPath, HDF5CompoundType<T> type, long size,
+            int blockSize, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an array (of rank 1) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param size The size of the compound array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public <T> void createArray(String objectPath, HDF5CompoundType<T> type, long size,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     */
+    public <T> void writeMDArray(String objectPath, HDF5CompoundType<T> type, MDArray<T> data);
+
+    /**
+     * Writes out an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param features The storage features of the data set.
+     */
+    public <T> void writeMDArray(String objectPath, HDF5CompoundType<T> type, MDArray<T> data,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param features The storage features of the data set.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeMDArray(String objectPath, HDF5CompoundType<T> type, MDArray<T> data,
+            HDF5GenericStorageFeatures features, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out an array (of rank N) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeMDArray(String objectPath, MDArray<T> data);
+
+    /**
+     * Writes out an array (of rank N) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or <code>Object[]</code>.
+     * @param features The storage features of the data set.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeMDArray(String objectPath, MDArray<T> data,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public <T> void writeMDArrayBlock(String objectPath, HDF5CompoundType<T> type, MDArray<T> data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeMDArrayBlock(String objectPath, HDF5CompoundType<T> type, MDArray<T> data,
+            long[] blockNumber, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param offset The offset of the block to write on each axis.
+     */
+    public <T> void writeMDArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, long[] offset);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param offset The offset of the block to write on each axis.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeMDArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, long[] offset, IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public <T> void writeMDArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, int[] blockDimensions, long[] offset, int[] memoryOffset);
+
+    /**
+     * Writes out a block of an array (of rank N) of compound values at the given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param data The data to write.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @param inspectorOrNull The inspector to be called after translating the Java objects to a
+     *            byte array and before writing the byte array to the HDF5 file.
+     */
+    public <T> void writeMDArrayBlockWithOffset(String objectPath, HDF5CompoundType<T> type,
+            MDArray<T> data, int[] blockDimensions, long[] offset, int[] memoryOffset,
+            IByteArrayInspector inspectorOrNull);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
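+     * <p>
+     * For illustration, a minimal sketch; <code>writer</code>, <code>type</code>, the path and
+     * the 100x100 <code>block</code> array are hypothetical:
+     * 
+     * <pre>
+     * writer.compound().createMDArray("/grid", type, new int[] { 100, 100 });
+     * // Write one 100x100 block at block index (0, 1), i.e. elements (0..99, 100..199).
+     * writer.compound().writeMDArrayBlock("/grid", type, block, new long[] { 0, 1 });
+     * </pre>
+     * 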
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The dimensions of the compound array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (along
+     *            each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array (along each axis) will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public <T> void createMDArray(String objectPath, HDF5CompoundType<T> type, int[] dimensions);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The extent of the compound array along each axis.
+     * @param blockDimensions The extent of one block along each axis (for block-wise IO).
+     *            Ignored if no extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>deflate == false</code>.
+     */
+    public <T> void createMDArray(String objectPath, HDF5CompoundType<T> type, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The extent of the compound array along each axis.
+     * @param blockDimensions The extent of one block along each axis (for block-wise IO).
+     *            Ignored if no extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>deflate == false</code>.
+     * @param features The storage features of the data set.
+     */
+    public <T> void createMDArray(String objectPath, HDF5CompoundType<T> type, long[] dimensions,
+            int[] blockDimensions, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an array (of rank N) of compound values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this compound type.
+     * @param dimensions The dimensions of the compound array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (along
+     *            each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array (along each axis) will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public <T> void createMDArray(String objectPath, HDF5CompoundType<T> type, int[] dimensions,
+            HDF5GenericStorageFeatures features);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5DateTimeReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5DateTimeReader.java
new file mode 100644
index 0000000..6fbeef7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5DateTimeReader.java
@@ -0,0 +1,437 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Date;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * An interface that provides methods for reading time and date values from HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5DateTimeReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code> if the attribute <var>attributeName</var> of data set
+     * <var>objectPath</var> is a time stamp, and <code>false</code> otherwise.
+     */
+    public boolean isTimeStamp(String objectPath, String attributeName) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
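+     * <p>
+     * For illustration, a minimal sketch; the <code>writer</code> / <code>reader</code> handles,
+     * the path and the attribute name are hypothetical:
+     * 
+     * <pre>
+     * writer.time().setAttr("/dataSet", "created", System.currentTimeMillis());
+     * long created = reader.time().getAttrAsLong("/dataSet", "created");
+     * </pre>
+     * 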
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp as number of milliseconds since January 1, 1970, 00:00:00 GMT.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public long getAttrAsLong(String objectPath, String attributeName);
+
+    /**
+     * Reads a time stamp array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp array; each element is a number of milliseconds since January 1, 1970,
+     *         00:00:00 GMT.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public long[] getArrayAttrAsLong(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional time stamp array attribute named <var>attributeName</var> from the
+     * data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp array; each element is a number of milliseconds since January 1, 1970,
+     *         00:00:00 GMT.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public MDLongArray getMDArrayAttrAsLong(String objectPath, String attributeName);
+
+    /**
+     * Reads a time stamp attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var> and returns it as a <code>Date</code>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp as {@link java.util.Date}.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public Date getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a time stamp array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var> and returns it as a <code>Date[]</code>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamps as a {@link java.util.Date} array.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public Date[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional time stamp array attribute named <var>attributeName</var> from the
+     * data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamps as a multi-dimensional {@link java.util.Date} array.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public MDArray<Date> getMDArrayAttr(String objectPath, String attributeName);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code> if the data set given by <var>objectPath</var> is a time stamp,
+     * and <code>false</code> otherwise.
+     */
+    public boolean isTimeStamp(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp value from the data set <var>objectPath</var>. The time stamp is stored as
+     * a <code>long</code> value in the HDF5 file. It needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp as number of milliseconds since January 1, 1970, 00:00:00 GMT.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public long readTimeStamp(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp array from the data set <var>objectPath</var>. The time stamp is stored as
+     * a <code>long</code> value in the HDF5 file. It needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp array; each element is a number of milliseconds since January 1,
+     *         1970, 00:00:00 GMT.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     */
+    public long[] readTimeStampArray(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time stamp array (of rank 1) from the data set <var>objectPath</var>. The
+     * time stamp is stored as a <code>long</code> value in the HDF5 file. It needs to be tagged as
+     * type variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public long[] readTimeStampArrayBlock(String objectPath, int blockSize, long blockNumber);
+
+    /**
+     * Reads a block of a time stamp array (of rank 1) from the data set <var>objectPath</var>. The
+     * time stamp is stored as a <code>long</code> value in the HDF5 file. It needs to be tagged as
+     * type variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     */
+    public long[] readTimeStampArrayBlockWithOffset(String objectPath, int blockSize, long offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of time stamps to iterate over.
+     * 
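+     * <p>
+     * For illustration, a minimal iteration sketch; the <code>reader</code> handle, the path and
+     * <code>process()</code> are hypothetical:
+     * 
+     * <pre>
+     * for (HDF5DataBlock<long[]> block : reader.time().getTimeStampArrayNaturalBlocks("/timestamps"))
+     * {
+     *     process(block.getData(), block.getIndex()); // one chunk of time stamps and its index
+     * }
+     * </pre>
+     * 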
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<long[]>> getTimeStampArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp value from the data set <var>objectPath</var> and returns it as a
+     * {@link Date}. The time stamp is stored as a <code>long</code> value in the HDF5 file. It
+     * needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp as {@link Date}.
+     * @throws HDF5JavaException If the <var>objectPath</var> does not denote a time stamp.
+     */
+    public Date readDate(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp array (of rank 1) from the data set <var>objectPath</var> and returns it
+     * as an array of {@link Date}s. The time stamp array is stored as an array of
+     * <code>long</code> values in the HDF5 file. It needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamps as a {@link Date} array.
+     * @throws HDF5JavaException If the <var>objectPath</var> does not denote a time stamp.
+     */
+    public Date[] readDateArray(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a {@link Date} array (of rank 1) from the data set <var>objectPath</var>.
+     * The time stamp is stored as a <code>long</code> value in the HDF5 file. It needs to be tagged
+     * as type variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public Date[] readDateArrayBlock(String objectPath, int blockSize, long blockNumber);
+
+    /**
+     * Reads a block of a {@link Date} array (of rank 1) from the data set <var>objectPath</var>.
+     * The time stamp is stored as a <code>long</code> value in the HDF5 file. It needs to be tagged
+     * as type variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * This tagging is done by the writer when using one of the time stamp writing methods (e.g.
+     * {@link IHDF5DateTimeWriter#write(String, long)}) or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.object().setTypeVariant("/dataSetPath",
+     *         HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH);
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     */
+    public Date[] readDateArrayBlockWithOffset(String objectPath, int blockSize, long offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of {@link Date}s to iterate
+     * over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<Date[]>> getDateArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional array of time stamps from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time stamp.
+     */
+    public MDLongArray readTimeStampMDArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional array of time stamps from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readTimeStampMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional array of time stamps from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readTimeStampMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+
+    /**
+     * Reads a multi-dimensional array data set <var>objectPath</var> of type time stamp into a
+     * given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath, MDLongArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional array data set <var>objectPath</var> of type time
+     * stamp into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath, MDLongArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getTimeStampMDArrayNaturalBlocks(
+            String dataSetPath);
+
+    /**
+     * Reads a multi-dimensional array of {@link Date}s from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time stamp.
+     */
+    public MDArray<Date> readDateMDArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional array of {@link Date}s from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     */
+    public MDArray<Date> readDateMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional array of {@link Date}s from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDArray<Date> readDateMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDArray<Date>>> getDateMDArrayNaturalBlocks(String dataSetPath);
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5DateTimeWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5DateTimeWriter.java
new file mode 100644
index 0000000..b09e02b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5DateTimeWriter.java
@@ -0,0 +1,436 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Date;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * An interface that provides methods for writing time and date values to HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5DateTimeWriter extends IHDF5DateTimeReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets a date value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
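+     * <p>
+     * For illustration, a minimal sketch; the <code>writer</code> handle, the path and the
+     * attribute name are hypothetical:
+     * 
+     * <pre>
+     * writer.time().setAttr("/dataSet", "lastModified", new Date());
+     * </pre>
+     * 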
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param date The value of the attribute.
+     */
+    public void setAttr(String objectPath, String attributeName,
+            Date date);
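+
+    // A minimal usage sketch (hypothetical file name and object path; it
+    // assumes this interface is obtained from an IHDF5Writer, e.g. via
+    // writer.time(), as in other JHDF5 releases):
+    //
+    //   IHDF5Writer writer = HDF5Factory.open("example.h5");
+    //   writer.time().write("/ds", new Date());            // the object must exist first
+    //   writer.time().setAttr("/ds", "created", new Date());
+    //   writer.close();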
+
+    /**
+     * Set a date array value as attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param dates The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String attributeName,
+            Date[] dates);
+
+    /**
+     * Set a time stamp value as attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeStamp The value of the attribute.
+     */
+    public void setAttr(String objectPath, String attributeName,
+            long timeStamp);
+
+    /**
+     * Set a time stamp array value as attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeStamps The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String attributeName,
+            long[] timeStamps);
+
+    /**
+     * Sets a multi-dimensional timestamp array attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name,
+            MDLongArray value);
+
+    /**
+     * Sets a multi-dimensional date array attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name,
+            MDArray<Date> value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a time stamp value. The data set will be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeStamp The timestamp to write as number of milliseconds since January 1, 1970,
+     *            00:00:00 GMT.
+     */
+    public void write(String objectPath, long timeStamp);
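+
+    // For example (hypothetical path; writer obtained as sketched above):
+    //
+    //   writer.time().write("/lastRun", System.currentTimeMillis());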
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The length of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The length of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a time stamp array (of rank 1). The data set will be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeStamps The timestamps to write as number of milliseconds since January 1, 1970,
+     *            00:00:00 GMT.
+     */
+    public void writeArray(String objectPath, long[] timeStamps);
+
+    /**
+     * Writes out a time stamp array (of rank 1). The data set will be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeStamps The timestamps to write as number of milliseconds since January 1, 1970,
+     *            00:00:00 GMT.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, long[] timeStamps,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a time stamp array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createArray(String, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link IHDF5LongWriter#createArray(String, long, int, HDF5IntStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, long[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a time stamp array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createArray(String, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, long[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link IHDF5LongWriter#createArray(String, long, int, HDF5IntStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>)
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, long[] data,
+            int dataSize, long offset);
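+
+    // A block-wise writing sketch (hypothetical path and sizes; it assumes
+    // HDF5GenericStorageFeatures.GENERIC_CHUNKED is available, as in other
+    // JHDF5 releases). 937 time stamps are written in blocks of 100; the
+    // trailing, partial block of 37 values uses the offset variant:
+    //
+    //   writer.time().createArray("/stamps", 937L, 100,
+    //           HDF5GenericStorageFeatures.GENERIC_CHUNKED);
+    //   long[] block = new long[100];
+    //   for (int i = 0; i < 9; ++i)
+    //   {
+    //       writer.time().writeArrayBlock("/stamps", block, i);
+    //   }
+    //   writer.time().writeArrayBlockWithOffset("/stamps", block, 37, 900L);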
+
+    /**
+     * Writes out a time stamp value provided as a {@link Date}.
+     * <p>
+     * <em>Note: The time stamp is stored as a <code>long</code> value and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param date The date to write.
+     * @see #write(String, long)
+     */
+    public void write(String objectPath, Date date);
+
+    /**
+     * Writes out a {@link Date} array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dates The dates to write.
+     * @see #writeArray(String, long[])
+     */
+    public void writeArray(String objectPath, Date[] dates);
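+
+    // For example (hypothetical path; writer obtained as sketched above):
+    //
+    //   writer.time().writeArray("/dates", new Date[] { new Date(0L), new Date() });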
+
+    /**
+     * Writes out a {@link Date} array (of rank 1).
+     * <p>
+     * <em>Note: Dates are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dates The dates to write.
+     * @param features The storage features of the data set.
+     * @see #writeArray(String, long[], HDF5GenericStorageFeatures)
+     */
+    public void writeArray(String objectPath, Date[] dates,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a multi-dimensional array of time stamps.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDLongArray data,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a multi-dimensional array of time stamps / dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+    
+    /**
+     * Creates a multi-dimensional array of time stamps / dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional array of time stamps / dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>long</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a multi-dimensional array of time stamps / dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+    
+    /**
+     * Writes out a block of a multi-dimensional array of time stamps.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDLongArray data,
+            long[] blockNumber);
+    
+    /**
+     * Writes out a block of a multi-dimensional array of time stamps.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset);
+    
+    /**
+     * Writes out a block of a multi-dimensional array of time stamps.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
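+
+    // A multi-dimensional block-writing sketch (hypothetical path and sizes;
+    // the MDLongArray(int[]) constructor from ch.systemsx.cisd.base is
+    // assumed, as in other JHDF5 releases):
+    //
+    //   writer.time().createMDArray("/stamps2d", new long[] { 100, 100 },
+    //           new int[] { 10, 10 });
+    //   MDLongArray chunk = new MDLongArray(new int[] { 10, 10 });
+    //   writer.time().writeMDArrayBlock("/stamps2d", chunk, new long[] { 0, 0 });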
+    
+    /**
+     * Writes out a multi-dimensional array of dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDArray<Date> data,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Writes out a block of a multi-dimensional array of dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDArray<Date> data,
+            long[] blockNumber);
+    
+    /**
+     * Writes out a block of a multi-dimensional array of dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDArray<Date> data,
+            long[] offset);
+    
+    /**
+     * Writes out a block of a multi-dimensional array of dates.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDArray<Date> data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5DoubleReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5DoubleReader.java
new file mode 100644
index 0000000..40e2718
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5DoubleReader.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+
+/**
+ * An interface that provides methods for reading <code>double</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5DoubleReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>double</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public double getAttr(String objectPath, String attributeName);
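+
+    // A minimal reading sketch (hypothetical file name, path and attribute; it
+    // assumes this interface is obtained from an IHDF5Reader, e.g. via
+    // reader.float64(), as in other JHDF5 releases):
+    //
+    //   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+    //   double scale = reader.float64().getAttr("/ds", "scale");
+    //   reader.close();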
+
+    /**
+     * Reads a <code>double[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public double[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional array <code>double</code> attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MDDoubleArray getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads a <code>double</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public double[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>double</code> value from the data set <var>objectPath</var>. This method 
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public double read(String objectPath);
+
+    /**
+     * Reads a <code>double</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public double[] readArray(String objectPath);
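+
+    // For example (hypothetical path; reader obtained as sketched above):
+    //
+    //   double[] data = reader.float64().readArray("/ds");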
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array from the data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath, 
+    				MDDoubleArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>double</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MDDoubleArray array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>double</code> array (of rank 1) from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>double[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public double[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
+
+    /**
+     * Reads a block from a <code>double</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>double[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public double[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public double[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public double[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public double[][] readMatrixBlockWithOffset(String objectPath, 
+    				int blockSizeX, int blockSizeY, long offsetX, long offsetY) 
+    				throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MDDoubleArray readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readMDArraySlice(String objectPath, IndexMap boundIndices);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readMDArraySlice(String objectPath, long[] boundIndices);
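+
+    // A slicing sketch for a 2-dimensional data set (hypothetical path): both
+    // calls read the "row" with index 0 bound to 5, leaving index 1 free:
+    //
+    //   MDDoubleArray row1 = reader.float64().readMDArraySlice("/ds2d",
+    //           new IndexMap().mapTo(0, 5));
+    //   MDDoubleArray row2 = reader.float64().readMDArraySlice("/ds2d",
+    //           new long[] { 5, -1 });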
+
+    /**
+     * Reads a block from a multi-dimensional <code>double</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readMDArrayBlock(String objectPath,
+    				int[] blockDimensions, long[] blockNumber);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>double</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDDoubleArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<double[]>> getArrayNaturalBlocks(
+    									String dataSetPath)
+            throws HDF5JavaException;
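+
+    // An iteration sketch (hypothetical path; it assumes HDF5DataBlock
+    // exposes getData()/getOffset(), as in other JHDF5 releases):
+    //
+    //   for (HDF5DataBlock<double[]> block : reader.float64()
+    //           .getArrayNaturalBlocks("/ds"))
+    //   {
+    //       long offset = block.getOffset();
+    //       double[] data = block.getData();
+    //   }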
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDDoubleArray>> getMDArrayNaturalBlocks(
+    									String dataSetPath);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5DoubleWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5DoubleWriter.java
new file mode 100644
index 0000000..7ba7ecb
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5DoubleWriter.java
@@ -0,0 +1,546 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+
+/**
+ * An interface that provides methods for writing <code>double</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5DoubleWriter extends IHDF5DoubleReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>double</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, double value);
+
+    /**
+     * Set a <code>double[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, double[] value);
+
+    /**
+     * Set a multi-dimensional <code>double</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MDDoubleArray value);
+
+    /**
+     * Set a <code>double[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, double[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>double</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, double value);
+
+    /**
+     * Writes out a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, double[] data);
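+
+    // A minimal writing sketch (hypothetical file name and path; it assumes
+    // this interface is obtained from an IHDF5Writer, e.g. via
+    // writer.float64(), as in other JHDF5 releases):
+    //
+    //   IHDF5Writer writer = HDF5Factory.open("example.h5");
+    //   writer.float64().writeArray("/ds", new double[] { 1.5, 2.5, 3.5 });
+    //   writer.close();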
+
+    /**
+     * Writes out a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, double[] data, 
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the double array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created, however data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>double</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5FloatStorageFeatures#FLOAT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5FloatStorageFeatures#FLOAT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5FloatStorageFeatures#FLOAT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5FloatStorageFeatures features);
+    
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the double array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created, however data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *                <code>features</code> is <code>HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>double</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, double[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>double</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, double[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>)
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, double[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, double[][] data);
+
+    /**
+     * Writes out a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, double[][] data, 
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5FloatStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5FloatStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+    		HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the double matrix to create.
+     * @param sizeY The size of the y dimension of the double matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the double matrix to create.
+     * @param sizeY The size of the y dimension of the double matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>double</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} if the total
+     * size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, double[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>double</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, double[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, double[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>double</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, double[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code>&lt;= data.length</code>)
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code>&lt;= data[0].length</code>)
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, double[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
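+
+    // A matrix block-writing sketch (hypothetical path and sizes). A 100x100
+    // matrix is created with 10x10 blocks; the offset variant then writes a
+    // partially filled block (5 valid rows) starting at row 95:
+    //
+    //   writer.float64().createMatrix("/m", 100L, 100L, 10, 10);
+    //   writer.float64().writeMatrixBlock("/m", new double[10][10], 0L, 0L);
+    //   writer.float64().writeMatrixBlockWithOffset("/m", new double[10][10],
+    //           5, 10, 95L, 0L);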
+
+    /**
+     * Writes out a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MDDoubleArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDDoubleArray data,
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>double</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDDoubleArray data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>double</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDDoubleArray data, long[] boundIndices);
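+    // Editorial sketch (hypothetical path): for a rank-6 data set, both calls below
+    // bind index 2 to 5 and index 4 to 7; "slice" must be a rank-4 array whose
+    // dimensions match the free dimensions of "/a":
+    //
+    //   MDDoubleArray slice = new MDDoubleArray(new int[] { 2, 3, 4, 5 });
+    //   writer.writeMDArraySlice("/a", slice, new IndexMap().mapTo(2, 5).mapTo(4, 7));
+    //   writer.writeMDArraySlice("/a", slice, new long[] { -1, -1, 5, -1, 7, -1 });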
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>double</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5FloatStorageFeatures#FLOAT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5FloatStorageFeatures#FLOAT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5FloatStorageFeatures#FLOAT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5FloatStorageFeatures features);
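+    // Editorial illustration of the modes described above (paths are made up):
+    //
+    //   // requesting a chunked data set: initial extent 0, chunk dimensions 16x16
+    //   writer.createMDArray("/a", new int[] { 16, 16 },
+    //           HDF5FloatStorageFeatures.FLOAT_CHUNKED);
+    //   // enforcing a non-extendable data set: fixed extent 16x16
+    //   writer.createMDArray("/b", new int[] { 16, 16 },
+    //           HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS);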
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDDoubleArray data,
+            long[] blockNumber);
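+    // Editorial sketch of the offset arithmetic (path is made up): with 4x4 blocks,
+    // block number { 1, 2 } starts at element offset { 4, 8 }:
+    //
+    //   writer.createMDArray("/md", new long[] { 8, 16 }, new int[] { 4, 4 });
+    //   MDDoubleArray block = new MDDoubleArray(new int[] { 4, 4 });
+    //   writer.writeMDArrayBlock("/md", block, new long[] { 1, 2 });
+    //   // equivalent to:
+    //   // writer.writeMDArrayBlockWithOffset("/md", block, new long[] { 4, 8 });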
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>double</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDDoubleArray data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>double</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDDoubleArray data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>double</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>double</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDDoubleArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
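+    // Editorial sketch (path is made up): write only the 4x4 sub-block of an 8x8
+    // in-memory array starting at memory position (2, 2) to file position (0, 4):
+    //
+    //   MDDoubleArray buffer = new MDDoubleArray(new int[] { 8, 8 });
+    //   writer.writeMDArrayBlockWithOffset("/md", buffer, new int[] { 4, 4 },
+    //           new long[] { 0, 4 }, new int[] { 2, 2 });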
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumBasicReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumBasicReader.java
new file mode 100644
index 0000000..e601ac9
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumBasicReader.java
@@ -0,0 +1,364 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface with legacy methods for reading enumeration values from HDF5 files. Do not use in
+ * any new code as it will be removed in a future version of JHDF5.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5EnumBasicReader
+{
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns the enumeration type <var>name</var> for this HDF5 file. Use this method only when
+     * you know that the type exists. If the <var>dataTypeName</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getEnumType(final String dataTypeName);
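+    // Editorial sketch of the naming rule above ("reader" stands for an instance of
+    // this interface; names are made up): a plain name is resolved as a data type
+    // name, a leading '/' makes it a data type path:
+    //
+    //   HDF5EnumerationType byName = reader.getEnumType("color");
+    //   HDF5EnumerationType byPath = reader.getEnumType("/myTypes/color");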
+
+    /**
+     * Returns the enumeration type <var>name</var> for this HDF5 file. Will check the type in the
+     * file with the <var>values</var>. If the <var>dataTypeName</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param values The values of the enumeration.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>values</var> provided.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getEnumType(final String dataTypeName, final String[] values)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type <var>name</var> for this HDF5 file. If the
+     * <var>dataTypeName</var> starts with '/', it will be considered a data type path instead of a
+     * data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param values The values of the enumeration.
+     * @param check If <code>true</code> and if the data type already exists, check whether it is
+     *            compatible with the <var>values</var> provided.
+     * @throws HDF5JavaException If <code>check</code> is <code>true</code>, the data type exists,
+     *             and it is not compatible with the <var>values</var> provided.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getEnumType(final String dataTypeName, final String[] values,
+            final boolean check) throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type for the data set <var>dataSetPath</var>.
+     * 
+     * @param dataSetPath The name of data set to get the enumeration type for.
+     * @deprecated Use {@link #getDataSetEnumType(String)} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getEnumTypeForObject(final String dataSetPath);
+
+    /**
+     * Returns the enumeration type for the data set <var>dataSetPath</var>.
+     * 
+     * @param dataSetPath The name of data set to get the enumeration type for.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getDataSetEnumType(final String dataSetPath);
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads an <code>enum</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set as a String.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public String getEnumAttributeAsString(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValue getEnumAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The attribute value read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public <T extends Enum<T>> T getEnumAttribute(final String objectPath,
+            final String attributeName, Class<T> enumClass) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute values as read from the data set as Strings.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     * @deprecated Use {@link #getEnumArrayAttribute(String, String)} instead and call
+     *             {@link HDF5EnumerationValueArray#toStringArray()}.
+     */
+    @Deprecated
+    public String[] getEnumArrayAttributeAsString(final String objectPath,
+            final String attributeName) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute values as read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray getEnumArrayAttribute(final String objectPath,
+            final String attributeName) throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set as a String.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public String readEnumAsString(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValue readEnum(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type or if
+     *             <var>enumClass</var> is incompatible with the HDF5 enumeration type of
+     *             <var>objectPath</var>.
+     */
+    public <T extends Enum<T>> T readEnum(final String objectPath, Class<T> enumClass)
+            throws HDF5JavaException;
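+    // Editorial sketch: the value read is matched by name against the constants of
+    // the given Java enum class (Color is assumed to be defined elsewhere):
+    //
+    //   Color c = reader.readEnum("/color", Color.class);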
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * <p>
+     * This method is faster than {@link #readEnum(String)} if the {@link HDF5EnumerationType} is
+     * already available.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enum type in the HDF5 file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not of <var>enumType</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValue readEnum(final String objectPath, final HDF5EnumerationType enumType)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not of <var>enumType</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray readEnumArray(final String objectPath,
+            final HDF5EnumerationType enumType) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray readEnumArray(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public <T extends Enum<T>> T[] readEnumArray(final String objectPath, Class<T> enumClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set as an array of Strings.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public String[] readEnumArrayAsString(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray readEnumArrayBlock(final String objectPath,
+            final int blockSize, final long blockNumber);
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray readEnumArrayBlock(final String objectPath,
+            final HDF5EnumerationType enumType, final int blockSize, final long blockNumber);
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data read from the data set. The length will be min(size - offset,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray readEnumArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset);
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data read from the data set. The length will be min(size - offset,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationValueArray readEnumArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationType enumType, final int blockSize, final long offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getEnumArrayNaturalBlocks(
+            final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getEnumArrayNaturalBlocks(
+            final String objectPath, final HDF5EnumerationType enumType) throws HDF5JavaException;
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumBasicWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumBasicWriter.java
new file mode 100644
index 0000000..45c8c9c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumBasicWriter.java
@@ -0,0 +1,297 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface with legacy methods for writing enumeration values to HDF5 files. Do not use in any
+ * new code as it will be removed in a future version of JHDF5.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5EnumBasicWriter
+{
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns the enumeration type <var>name</var> for this HDF5 file. Will check the type in the
+     * file with the <var>values</var>. If the <var>dataTypeName</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param values The values of the enumeration.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>values</var> provided.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getEnumType(String dataTypeName, String[] values)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type <var>name</var> for this HDF5 file. If the
+     * <var>dataTypeName</var> starts with '/', it will be considered a data type path instead of a
+     * data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param values The values of the enumeration.
+     * @param check If <code>true</code> and if the data type already exists, check whether it is
+     *            compatible with the <var>values</var> provided.
+     * @throws HDF5JavaException If <code>check</code> is <code>true</code>, the data type exists,
+     *             and it is not compatible with the <var>values</var> provided.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType getEnumType(String dataTypeName, String[] values,
+            boolean check) throws HDF5JavaException;
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets an enum attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void setEnumAttribute(String objectPath, String name,
+            HDF5EnumerationValue value);
+
+    /**
+     * Sets an enum attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void setEnumAttribute(String objectPath, String name, Enum<?> value);
+
+    /**
+     * Sets an enum array attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void setEnumArrayAttribute(String objectPath, String name,
+            HDF5EnumerationValueArray value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out an enum value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void writeEnum(String objectPath, HDF5EnumerationValue value);
+
+    /**
+     * Writes out an enum value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     */
+    public <T extends Enum<T>> void writeEnum(String objectPath, Enum<T> value);
+
+    /**
+     * Writes out an enum value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param options The allowed values of the enumeration type.
+     * @param value The value of the data set.
+     */
+    public void writeEnum(String objectPath, String[] options, String value);
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void writeEnumArray(String objectPath, HDF5EnumerationValueArray data);
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     */
+    public <T extends Enum<T>> void writeEnumArray(String objectPath, Enum<T>[] data);
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param options The allowed values of the enumeration type.
+     * @param data The data to write.
+     */
+    public void writeEnumArray(String objectPath, String[] options, String[] data);
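+    // Editorial sketch (names made up): writing a scalar value and an array of an
+    // enumeration type defined inline by its allowed options:
+    //
+    //   writer.writeEnum("/state", new String[] { "RUNNING", "DONE", "FAILED" }, "DONE");
+    //   writer.writeEnumArray("/states", new String[] { "RUNNING", "DONE", "FAILED" },
+    //           new String[] { "DONE", "RUNNING", "DONE" });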
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param features The storage features of the data set. Note that for scaling compression the
+     *            compression factor is ignored. Instead, the scaling factor is computed from the
+     *            number of entries in the enumeration.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void writeEnumArray(String objectPath, HDF5EnumerationValueArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an enum array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType createEnumArray(String objectPath,
+            HDF5EnumerationType enumType, int size);
+
+    /**
+     * Creates an enum array (of rank 1). The initial size of the array is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType createEnumArray(String objectPath,
+            HDF5EnumerationType enumType, long size, int blockSize);
+
+    /**
+     * Creates an enum array (of rank 1). The initial size of the array is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType createEnumArray(String objectPath,
+            HDF5EnumerationType enumType, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an enum array (of rank 1). The initial size of the array is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public HDF5EnumerationType createEnumArray(String objectPath,
+            HDF5EnumerationType enumType, long size,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an enum array (of rank 1). The data set needs to have been created by
+     * {@link #createEnumArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)}
+     * beforehand. The {@link HDF5EnumerationType} of the create call and of this call need
+     * to match.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createEnumArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The value of {@link HDF5EnumerationValueArray#getLength()}
+     *            defines the block size. Must not be <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void writeEnumArrayBlock(String objectPath, HDF5EnumerationValueArray data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of an enum array (of rank 1). The data set needs to have been created by
+     * {@link #createEnumArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)}
+     * beforehand. The {@link HDF5EnumerationType} of the create call and of this call need
+     * to match.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createEnumArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The value of {@link HDF5EnumerationValueArray#getLength()}
+     *            defines the block size. Must not be <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.getLength()</code> )
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public void writeEnumArrayBlockWithOffset(String objectPath,
+            HDF5EnumerationValueArray data, int dataSize, long offset);
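+    // Editorial sketch of block-wise writing (paths and values are made up; the
+    // HDF5EnumerationValueArray(HDF5EnumerationType, String[]) constructor is an
+    // assumption): create with block size 3, write one full block, then a partial one:
+    //
+    //   HDF5EnumerationType t = writer.getEnumType("color",
+    //           new String[] { "RED", "GREEN", "BLUE" });
+    //   writer.createEnumArray("/colors", t, 0L, 3,
+    //           HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    //   writer.writeEnumArrayBlock("/colors",
+    //           new HDF5EnumerationValueArray(t, new String[] { "RED", "RED", "BLUE" }), 0L);
+    //   writer.writeEnumArrayBlockWithOffset("/colors",
+    //           new HDF5EnumerationValueArray(t, new String[] { "GREEN" }), 1, 3L);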
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumReader.java
new file mode 100644
index 0000000..adfff40
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumReader.java
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface that provides methods for reading enumeration values from HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5EnumReader extends IHDF5EnumTypeRetriever
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads an <code>enum</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set as a String.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     */
+    public String getAttrAsString(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     */
+    public HDF5EnumerationValue getAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The attribute value read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     */
+    public <T extends Enum<T>> T getAttr(final String objectPath,
+            final String attributeName, Class<T> enumClass) throws HDF5JavaException;
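+    // Editorial sketch ("reader" stands for an instance of this interface; State is
+    // a Java enum assumed to be defined elsewhere, with constants matching the
+    // values of the HDF5 enumeration type):
+    //
+    //   State s = reader.getAttr("/dataset", "state", State.class);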
+
+    /**
+     * Reads an <code>enum</code> array (of rank 1) attribute named <var>attributeName</var> from
+     * the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute values as read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     */
+    public HDF5EnumerationValueArray getArrayAttr(final String objectPath,
+            final String attributeName) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>enum</code> array (of rank N) attribute named <var>attributeName</var> from
+     * the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute values as read from the data set.
+     * @throws HDF5JavaException If the attribute is not an enum type.
+     */
+    public HDF5EnumerationValueMDArray getMDArrayAttr(final String objectPath,
+            final String attributeName) throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set as a String.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public String readAsString(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public HDF5EnumerationValue read(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type or if
+     *             <var>enumClass</var> is incompatible with the HDF5 enumeration type of
+     *             <var>objectPath</var>.
+     */
+    public <T extends Enum<T>> T read(final String objectPath, Class<T> enumClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * <p>
+     * This method is faster than {@link #read(String)} if the {@link HDF5EnumerationType} is
+     * already available.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enum type in the HDF5 file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not of <var>enumType</var>.
+     */
+    public HDF5EnumerationValue read(final String objectPath, final HDF5EnumerationType enumType)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not of <var>enumType</var>.
+     */
+    public HDF5EnumerationValueArray readArray(final String objectPath,
+            final HDF5EnumerationType enumType) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public HDF5EnumerationValueArray readArray(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value array block from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public HDF5EnumerationValueArray readArrayBlock(final String objectPath,
+            final int blockSize, final long blockNumber);
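+    // Editorial sketch (path is made up): block b of size 100 covers the elements
+    // [100 * b, 100 * b + 100) of the data set, clipped at its end:
+    //
+    //   HDF5EnumerationValueArray block = reader.readArrayBlock("/colors", 100, 2L);
+    //   // the same elements via an explicit offset:
+    //   // reader.readArrayBlockWithOffset("/colors", 100, 200L);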
+
+    /**
+     * Reads an <code>Enum</code> value array block from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public HDF5EnumerationValueArray readArrayBlock(final String objectPath,
+            final HDF5EnumerationType enumType, final int blockSize, final long blockNumber);
+
+    /**
+     * Reads an <code>Enum</code> value array block from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data read from the data set. The length will be min(size - offset,
+     *         blockSize).
+     */
+    public HDF5EnumerationValueArray readArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset);
+
+    /**
+     * Reads an <code>Enum</code> value array block from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param blockSize The block size (this will be the value returned by
+     *            {@link HDF5EnumerationValueArray#getLength()} if the data set is long
+     *            enough).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data read from the data set. The length will be min(size - offset,
+     *         blockSize).
+     */
+    public HDF5EnumerationValueArray readArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationType enumType, final int blockSize, final long offset);
+
+    /**
+     * Reads an <code>Enum</code> array (of rank N) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public HDF5EnumerationValueMDArray readMDArray(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> array (of rank N) from the data set <var>objectPath</var>.
+     * <p>
+     * This method is faster than {@link #readMDArray(String)} if the {@link HDF5EnumerationType} is
+     * already available.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enum type in the HDF5 file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not of <var>enumType</var>.
+     */
+    public HDF5EnumerationValueMDArray readMDArray(final String objectPath,
+            final HDF5EnumerationType enumType) throws HDF5JavaException;
+
+    /**
+     * Reads a block from an <code>Enum</code> array (of rank N) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this enumeration type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param blockNumber The number of the block to read along each axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum data set.
+     */
+    public HDF5EnumerationValueMDArray readMDArrayBlock(final String objectPath,
+            final HDF5EnumerationType type, final int[] blockDimensions, final long[] blockNumber)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block from an <code>Enum</code> array (of rank N) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param blockNumber The number of the block to read along each axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum data set.
+     */
+    public HDF5EnumerationValueMDArray readMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block from a <code>Enum</code> array block (of rank N) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this <code>Enum</code> type.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param offset The offset of the block in the data set to start reading from along each
+     *            axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum data set.
+     */
+    public HDF5EnumerationValueMDArray readMDArrayBlockWithOffset(final String objectPath,
+            final HDF5EnumerationType type, final int[] blockDimensions, final long[] offset)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block of an <code>Enum</code> array (of rank N) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block to read along each axis.
+     * @param offset The offset of the block in the data set to start reading from along each
+     *            axis.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum data set.
+     */
+    public HDF5EnumerationValueMDArray readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getArrayBlocks(
+            final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<HDF5EnumerationValueArray>> getArrayBlocks(
+            final String objectPath, final HDF5EnumerationType enumType) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this enum type.
+     * @see HDF5MDDataBlock
+     * @throws HDF5JavaException If the data set is not an enum data set.
+     */
+    public Iterable<HDF5MDEnumBlock> getMDArrayBlocks(final String objectPath,
+            final HDF5EnumerationType type) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5MDDataBlock
+     * @throws HDF5JavaException If the data set is not an enum data set.
+     */
+    public Iterable<HDF5MDEnumBlock> getMDArrayBlocks(final String objectPath)
+            throws HDF5JavaException;
+
+}
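
A minimal usage sketch of the block-read API above, assuming a file
"example.h5" containing a rank-1 enum data set at "/colors", and assuming the
enum reader is obtained via IHDF5Reader.enumeration() (that accessor is not
part of this diff):

    import ch.systemsx.cisd.hdf5.HDF5DataBlock;
    import ch.systemsx.cisd.hdf5.HDF5EnumerationValueArray;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;

    public class EnumBlockReadExample
    {
        public static void main(String[] args)
        {
            final IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
            try
            {
                // Iterate over the natural (chunk-sized) blocks of the data set.
                for (HDF5DataBlock<HDF5EnumerationValueArray> block : reader
                        .enumeration().getArrayBlocks("/colors"))
                {
                    final HDF5EnumerationValueArray data = block.getData();
                    for (int i = 0; i < data.getLength(); ++i)
                    {
                        // getValue(i) is assumed to yield the string form of element i.
                        System.out.println((block.getOffset() + i) + ": "
                                + data.getValue(i));
                    }
                }
            } finally
            {
                reader.close();
            }
        }
    }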
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumTypeRetriever.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumTypeRetriever.java
new file mode 100644
index 0000000..9bc14ec
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumTypeRetriever.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface for retrieving HDF5 enum types. Depending on whether it is a reader or a writer
+ * that implements it, non-existing enum types may be created by calling the methods of this
+ * interface, or an exception may be thrown.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5EnumTypeRetriever
+{
+    /**
+     * Returns the enumeration type <var>dataTypeName</var> for this HDF5 file. Use this method
+     * only when you know that the type exists. If the <var>dataTypeName</var> starts with '/', it
+     * will be considered a data type path instead of a data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     */
+    public HDF5EnumerationType getType(String dataTypeName);
+
+    /**
+     * Returns the enumeration type for the data set <var>dataSetPath</var>.
+     * 
+     * @param dataSetPath The name of the data set to get the enumeration type for.
+     */
+    public HDF5EnumerationType getDataSetType(String dataSetPath);
+
+    /**
+     * Returns the enumeration type for the attribute <var>attributeName</var> of the data set
+     * <var>dataSetPath</var>.
+     * 
+     * @param dataSetPath The name of the data set.
+     * @param attributeName The name of the attribute to get the type for.
+     */
+    public HDF5EnumerationType getAttributeType(String dataSetPath, String attributeName);
+
+    /**
+     * Returns the enumeration type <var>dataTypeName</var> for this HDF5 file. If the type is
+     * read from the file, it will be checked against the <var>values</var> provided. If the
+     * <var>dataTypeName</var> starts with '/', it will be considered a data type path instead of a
+     * data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param values The values of the enumeration.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>values</var> provided.
+     */
+    public HDF5EnumerationType getType(String dataTypeName, String[] values)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type corresponding to <var>genericType</var> for this HDF5 file. If
+     * the type is read from the file, it will be checked against the values of
+     * <var>genericType</var>. If the name of <var>genericType</var> starts with '/', it will be
+     * considered a data type path instead of a data type name.
+     * 
+     * @param genericType The generic enum type (independent of this file).
+     * @throws HDF5JavaException If the data type exists and is not compatible with the values of
+     *             <var>genericType</var>.
+     */
+    public HDF5EnumerationType getType(EnumerationType genericType)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type <var>dataTypeName</var> for this HDF5 file. If the
+     * <var>dataTypeName</var> starts with '/', it will be considered a data type path instead of a
+     * data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param values The values of the enumeration.
+     * @param check If <code>true</code> and if the data type already exists, check whether it is
+     *            compatible with the <var>values</var> provided.
+     * @throws HDF5JavaException If <code>check = true</code>, the data type exists and is not
+     *             compatible with the <var>values</var> provided.
+     */
+    public HDF5EnumerationType getType(String dataTypeName, String[] values, boolean check)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type corresponding to <var>genericType</var> for this HDF5 file. If
+     * the name of <var>genericType</var> starts with '/', it will be considered a data type path
+     * instead of a data type name.
+     * 
+     * @param genericType The generic enum type (independent of this file).
+     * @param check If <code>true</code> and if the data type already exists, check whether it is
+     *            compatible with the values of <var>genericType</var>.
+     * @throws HDF5JavaException If <code>check = true</code>, the data type exists and is not
+     *             compatible with the values of <var>genericType</var>.
+     */
+    public HDF5EnumerationType getType(EnumerationType genericType, boolean check)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type <var>dataTypeName</var> for this HDF5 file. Will check the
+     * type in the file against the constants of <var>enumClass</var>. If the
+     * <var>dataTypeName</var> starts with '/', it will be considered a data type path instead of a
+     * data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param enumClass The enumeration class to get the values from.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>enumClass</var> provided.
+     */
+    public HDF5EnumerationType getType(String dataTypeName, Class<? extends Enum<?>> enumClass)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type <var>dataTypeName</var> for this HDF5 file. Will check the
+     * type in the file against the constants of <var>enumClass</var>. If the
+     * <var>dataTypeName</var> starts with '/', it will be considered a data type path instead of a
+     * data type name.
+     * 
+     * @param dataTypeName The name of the enumeration in the HDF5 file.
+     * @param enumClass The enumeration class to get the values from.
+     * @param check If <code>true</code> and if the data type already exists, check whether it is
+     *            compatible with the <var>enumClass</var> provided.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>enumClass</var> provided.
+     */
+    public <T extends Enum<?>> HDF5EnumerationType getType(String dataTypeName, Class<T> enumClass,
+            boolean check) throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type for this HDF5 file. Will check the type in the file against
+     * the constants of <var>enumClass</var>. Will use the simple class name of
+     * <var>enumClass</var> as the data type name.
+     * 
+     * @param enumClass The enumeration class to get the values from.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>enumClass</var> provided.
+     */
+    public <T extends Enum<?>> HDF5EnumerationType getType(Class<T> enumClass)
+            throws HDF5JavaException;
+
+    /**
+     * Returns the enumeration type for this HDF5 file. Will check the type in the file against
+     * the constants of <var>enumClass</var>. Will use the simple class name of
+     * <var>enumClass</var> as the data type name.
+     * 
+     * @param enumClass The enumeration class to get the values from.
+     * @param check If <code>true</code> and if the data type already exists, check whether it is
+     *            compatible with the <var>enumClass</var> provided.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>enumClass</var> provided.
+     */
+    public HDF5EnumerationType getType(Class<? extends Enum<?>> enumClass, boolean check)
+            throws HDF5JavaException;
+}
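
A short sketch of how these lookups compose, assuming a writer opened on
"example.h5" and, again, an enumeration() accessor on the writer (an
assumption made for illustration):

    import ch.systemsx.cisd.hdf5.HDF5EnumerationType;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class EnumTypeRetrieverExample
    {
        enum Color { RED, GREEN, BLUE }

        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("example.h5");
            try
            {
                // Create (or look up) a named type from explicit values ...
                final HDF5EnumerationType byValues = writer.enumeration().getType(
                        "Color", new String[] { "RED", "GREEN", "BLUE" });
                // ... or derive both name and values from a Java enum class.
                final HDF5EnumerationType byClass =
                        writer.enumeration().getType(Color.class);
                System.out.println("Compatible: " + byValues.equals(byClass));
            } finally
            {
                writer.close();
            }
        }
    }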
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumValueCreator.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumValueCreator.java
new file mode 100644
index 0000000..a00a3e3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumValueCreator.java
@@ -0,0 +1,456 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+
+/**
+ * Interface for creation of enumeration values.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5EnumValueCreator
+{
+
+    /**
+     * Creates a new enumeration value with enumeration type name <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getEnumType(typeName, options), value)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param value The string representation of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newVal(String typeName, String[] options, String value);
+
+    /**
+     * Creates a new enumeration value with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getAnonymousEnumType(options), value)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param value The string representation of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newAnonVal(String[] options, String value);
+
+    /**
+     * Creates a new enumeration value with enumeration type name <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getEnumType(typeName, options), value)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param value The ordinal value of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newVal(String typeName, String[] options, final int value);
+
+    /**
+     * Creates a new enumeration value with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getAnonymousEnumType(options), value)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param value The ordinal of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newAnonVal(String[] options, int value);
+
+    /**
+     * Creates a new enumeration value with enumeration type name <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getEnumType(typeName, options), value)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param value The ordinal value of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newVal(String typeName, String[] options, final short value);
+
+    /**
+     * Creates a new enumeration value with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getAnonymousEnumType(options), value)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param value The ordinal value of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newAnonVal(String[] options, short value);
+
+    /**
+     * Creates a new enumeration value with enumeration type name <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getEnumType(typeName, options), value)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param value The ordinal value of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newVal(String typeName, String[] options, final byte value);
+
+    /**
+     * Creates a new enumeration value with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getAnonymousEnumType(options), value)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param value The ordinal of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public HDF5EnumerationValue newAnonVal(String[] options, byte value);
+
+    /**
+     * Creates a new enumeration value with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getAnonymousEnumType(value.getClass()), value)</code>.
+     * 
+     * @param value The value (including the type) of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValue newAnonVal(Enum<T> value);
+
+    /**
+     * Creates a new enumeration value with an enumeration type of name
+     * <code>value.getClass().getSimpleName()</code>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getEnumType(value.getClass()), value)</code>.
+     * 
+     * @param value The value (including the type) of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValue newVal(Enum<T> value);
+
+    /**
+     * Creates a new enumeration value with an enumeration type of name <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValue(writer.getEnumType(typeName, value.getClass()), value)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param value The value (including the type) of the created enumeration value.
+     * @return The created enumeration value.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValue newVal(String typeName, Enum<T> value);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The string representations of the elements of the created enumeration value
+     *            array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, String[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The string representations of the elements of the created enumeration value
+     *            array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newAnonArray(String[] options, String[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, int[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newAnonArray(String[] options, int[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, short[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newAnonArray(String[] options, short[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newArray(String typeName, String[] options, byte[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueArray newAnonArray(String[] options, byte[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getAnonymousEnumType(values.getClass().getComponentType()), values)</code>.
+     * 
+     * @param values The value array (which has the type) of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValueArray newAnonArray(Enum<T>[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getEnumType(typeName, values.getClass().getComponentType()), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param values The value array (which has the type) of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValueArray newArray(String typeName, Enum<T>[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank 1) with an enumeration type of name
+     * <code>values.getClass().getComponentType().getSimpleName()</code>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueArray(writer.getEnumType(values.getClass().getComponentType()), values)</code>.
+     * 
+     * @param values The value array (which has the type) of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValueArray newArray(Enum<T>[] values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The string representations of the elements of the created enumeration value
+     *            array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDArray<String> values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The string representations of the elements of the created enumeration value
+     *            array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDArray<String> values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDIntArray values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDIntArray values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDShortArray values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDShortArray values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getEnumType(typeName, options), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newMDArray(String typeName, String[] options,
+            MDByteArray values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getAnonymousEnumType(options), values)</code>.
+     * 
+     * @param options The values of the enumeration type.
+     * @param values The ordinal values of the elements of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public HDF5EnumerationValueMDArray newAnonMDArray(String[] options, MDByteArray values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an anonymous enumeration type.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getAnonymousEnumType(values.getAsFlatArray().getClass().getComponentType()), values)</code>.
+     * 
+     * @param values The value array (which has the type) of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValueMDArray newAnonMDArray(MDArray<Enum<T>> values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an enumeration type of name
+     * <var>typeName</var>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getEnumType(typeName, values.getAsFlatArray().getClass().getComponentType()), values)</code>.
+     * 
+     * @param typeName The name of the enumeration type.
+     * @param values The value array (which has the type) of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValueMDArray newMDArray(String typeName,
+            MDArray<Enum<T>> values);
+
+    /**
+     * Creates a new enumeration value array (of rank N) with an enumeration type of name
+     * <code>values.getAsFlatArray().getClass().getComponentType().getSimpleName()</code>.
+     * <p>
+     * Shortcut for
+     * <code>new HDF5EnumerationValueMDArray(writer.getEnumType(values.getAsFlatArray().getClass().getComponentType()), values)</code>.
+     * 
+     * @param values The value array (which has the type) of the created enumeration value array.
+     * @return The created enumeration value array.
+     */
+    public <T extends Enum<T>> HDF5EnumerationValueMDArray newMDArray(MDArray<Enum<T>> values);
+
+}
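
A minimal sketch of the value-creation shortcuts above, assuming a writer
whose enumeration() facet implements both this interface and the enum writer
defined below (the accessor itself is an assumption):

    import ch.systemsx.cisd.hdf5.HDF5EnumerationValue;
    import ch.systemsx.cisd.hdf5.HDF5EnumerationValueArray;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class EnumValueCreatorExample
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("example.h5");
            try
            {
                final String[] options = new String[] { "LOW", "MEDIUM", "HIGH" };
                // A single value, given by its string representation ...
                final HDF5EnumerationValue v =
                        writer.enumeration().newVal("Level", options, "MEDIUM");
                // ... and a rank-1 array, given by ordinals (0 = LOW, 2 = HIGH).
                final HDF5EnumerationValueArray a = writer.enumeration().newArray(
                        "Level", options, new int[] { 0, 2, 1 });
                writer.enumeration().write("/level", v);
                writer.enumeration().writeArray("/levels", a);
            } finally
            {
                writer.close();
            }
        }
    }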
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumWriter.java
new file mode 100644
index 0000000..564fa8d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5EnumWriter.java
@@ -0,0 +1,361 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface that provides methods for writing enumeration values to HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5EnumWriter extends IHDF5EnumReader, IHDF5EnumValueCreator
+{
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns an anonymous enumeration type for this HDF5 file.
+     * 
+     * @param options The values of the enumeration type.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>options</var> provided.
+     */
+    public HDF5EnumerationType getAnonType(String[] options)
+            throws HDF5JavaException;
+
+    /**
+     * Returns an anonymous enumeration type for this HDF5 file.
+     * 
+     * @param enumClass The enumeration class to get the values from.
+     * @throws HDF5JavaException If the data type exists and is not compatible with the
+     *             <var>enumClass</var> provided.
+     */
+    public HDF5EnumerationType getAnonType(Class<? extends Enum<?>> enumClass)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets an enum attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name,
+            HDF5EnumerationValue value);
+
+    /**
+     * Sets an enum attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @throws HDF5JavaException If the enum type of <var>value</var> is not a type of this file.
+     */
+    public void setAttr(String objectPath, String name, Enum<?> value)
+            throws HDF5JavaException;
+
+    /**
+     * Sets an enum array attribute (of rank 1) to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name,
+            HDF5EnumerationValueArray value);
+
+    /**
+     * Sets an enum array (of rank N) attribute to the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name,
+            HDF5EnumerationValueMDArray value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out an enum value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     * @throws HDF5JavaException If the enum type of <var>value</var> is not a type of this file.
+     */
+    public void write(String objectPath, HDF5EnumerationValue value)
+            throws HDF5JavaException;
+
+    /**
+     * Writes out an enum value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     * @throws HDF5JavaException If the enum type of <var>value</var> is not a type of this file.
+     */
+    public void write(String objectPath, Enum<?> value) throws HDF5JavaException;
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @throws HDF5JavaException If the enum type of <var>data</var> is not a type of this file.
+     */
+    public void writeArray(String objectPath, HDF5EnumerationValueArray data)
+            throws HDF5JavaException;
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param features The storage features of the data set. Note that for scaling compression the
+     *            compression factor is ignored. Instead, the scaling factor is computed from the
+     *            number of entries in the enumeration.
+     * @throws HDF5JavaException If the enum type of <var>data</var> is not a type of this file.
+     */
+    public void writeArray(String objectPath, HDF5EnumerationValueArray data,
+            HDF5IntStorageFeatures features) throws HDF5JavaException;
+
+    /**
+     * Creates an enum array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createArray(String objectPath,
+            HDF5EnumerationType enumType, int size);
+
+    /**
+     * Creates an enum array (of rank 1). The initial size of the array is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createArray(String objectPath,
+            HDF5EnumerationType enumType, long size, int blockSize);
+
+    /**
+     * Creates an enum array (of rank 1). The initial size of the array is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createArray(String objectPath,
+            HDF5EnumerationType enumType, long size,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an enum array (of rank 1). The initial size of the array is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumType The enumeration type of this array.
+     * @param size The size of the enum array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @param features The storage features of the data set.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createArray(String objectPath,
+            HDF5EnumerationType enumType, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an enum array (of rank 1). The data set needs to have been created by
+     * {@link #createArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)}
+     * beforehand. Obviously, the {@link HDF5EnumerationType} of the create call and of this call
+     * need to match.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The value of {@link HDF5EnumerationValueArray#getLength()}
+     *            defines the block size. Must not be <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, HDF5EnumerationValueArray data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of an enum array (of rank 1). The data set needs to have been created by
+     * {@link #createArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)}
+     * beforehand. Obviously, the {@link HDF5EnumerationType} of the create call and of this call
+     * need to match.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, HDF5EnumerationType, long, int, HDF5IntStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.getLength()</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath,
+            HDF5EnumerationValueArray data, int dataSize, long offset);
+
+    /**
+     * Writes out an array (of rank N) of Enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, HDF5EnumerationValueMDArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out an array (of rank N) of Enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     */
+    public void writeMDArray(String objectPath, HDF5EnumerationValueMDArray data);
+
+    /**
+     * Creates an array (of rank N) of Enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this Enum type.
+     * @param dimensions The dimensions of the enum array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (along
+     *            each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array (along each axis) will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createMDArray(String objectPath,
+            HDF5EnumerationType type, int[] dimensions);
+
+    /**
+     * Creates an array (of rank N) of Enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this Enum type.
+     * @param dimensions The extent of the Enum array along each axis.
+     * @param blockDimensions The extent of one block along each axis (for block-wise IO). Ignored
+     *            if no extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>deflate == false</code>.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createMDArray(String objectPath,
+            HDF5EnumerationType type, long[] dimensions, int[] blockDimensions);
+
+    /**
+     * Creates an array (of rank N) of Enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this Enum type.
+     * @param dimensions The extent of the Enum array along each axis.
+     * @param blockDimensions The extent of one block along each axis (for block-wise IO). Ignored
+     *            if no extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *            <code>deflate == false</code>.
+     * @param features The storage features of the data set.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createMDArray(String objectPath,
+            HDF5EnumerationType type, long[] dimensions, int[] blockDimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an array (of rank N) of Enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param type The type definition of this Enum type.
+     * @param dimensions The dimensions of the enum array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (along
+     *            each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array (along each axis) will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @return <var>enumType</var>
+     */
+    public HDF5EnumerationType createMDArray(String objectPath,
+            HDF5EnumerationType type, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an array (of rank N) of Enum values, given a <var>blockNumber</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath,
+            HDF5EnumerationValueMDArray data, long[] blockNumber);
+
+    /**
+     * Writes out a block of an array (of rank N) of Enum values at a given <var>offset</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath,
+            HDF5EnumerationValueMDArray data, long[] offset);
+
+}
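
A sketch of block-wise writing with the create/write pair above, assuming the
usual enumeration() accessor and the HDF5EnumerationValueArray(type, int[])
constructor implied by the shortcuts in IHDF5EnumValueCreator:

    import ch.systemsx.cisd.hdf5.HDF5EnumerationType;
    import ch.systemsx.cisd.hdf5.HDF5EnumerationValueArray;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class EnumBlockWriteExample
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("example.h5");
            try
            {
                final HDF5EnumerationType type = writer.enumeration().getType(
                        "State", new String[] { "OFF", "ON" });
                // Extendable data set, initial size 0, block (chunk) size 4.
                writer.enumeration().createArray("/states", type, 0L, 4);
                // Each block's size is taken from the array's length.
                writer.enumeration().writeArrayBlock("/states",
                        new HDF5EnumerationValueArray(type, new int[] { 0, 1, 1, 0 }), 0L);
                writer.enumeration().writeArrayBlock("/states",
                        new HDF5EnumerationValueArray(type, new int[] { 1, 1, 0, 0 }), 1L);
            } finally
            {
                writer.close();
            }
        }
    }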
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5Factory.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5Factory.java
new file mode 100644
index 0000000..4cd915b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5Factory.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+/**
+ * A factory for creating writers and readers of HDF5 files. For straightforward creation, see
+ * {@link #open(File)} and {@link #openForReading(File)}. If you need full control over the
+ * creation process, see {@link #configure(File)} and {@link #configureForReading(File)}.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5Factory
+{
+    /**
+     * Opens an HDF5 <var>file</var> for writing and reading. If the file does not yet exist, it
+     * will be created.
+     */
+    public IHDF5Writer open(File file);
+
+    /**
+     * Opens an HDF5 <var>file</var> for reading. It is an error if the file does not exist.
+     */
+    public IHDF5Reader openForReading(File file);
+
+    /**
+     * Opens a configurator for an HDF5 <var>file</var> for writing and reading. Configure the
+     * writer as you need and then call {@link IHDF5WriterConfigurator#writer()} in order to start
+     * reading and writing the file.
+     */
+    public IHDF5WriterConfigurator configure(File file);
+
+    /**
+     * Opens a configurator for an HDF5 <var>file</var> for reading. Configure the reader as you
+     * need and then call {@link IHDF5ReaderConfigurator#reader()} in order to start reading the
+     * file.
+     */
+    public IHDF5ReaderConfigurator configureForReading(File file);
+
+    /**
+     * Returns <code>true</code>, if the <var>file</var> is an HDF5 file and <code>false</code>
+     * otherwise.
+     */
+    public boolean isHDF5File(File file);
+
+}
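
A usage sketch, assuming HDF5FactoryProvider.get() as the way to obtain an
IHDF5Factory instance and a writeString() convenience method on IHDF5Writer
(both are assumptions; neither is part of this diff):

    import java.io.File;

    import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
    import ch.systemsx.cisd.hdf5.IHDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class FactoryExample
    {
        public static void main(String[] args)
        {
            final IHDF5Factory factory = HDF5FactoryProvider.get();
            final File file = new File("example.h5");
            // Full control over creation goes through the configurator.
            final IHDF5Writer writer = factory.configure(file).writer();
            try
            {
                writer.writeString("/greeting", "hello");
            } finally
            {
                writer.close();
            }
            System.out.println("Is HDF5 file: " + factory.isHDF5File(file));
        }
    }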
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5FileLevelReadOnlyHandler.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5FileLevelReadOnlyHandler.java
new file mode 100644
index 0000000..3a0718e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5FileLevelReadOnlyHandler.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+
+/**
+ * An interface for handling file-level information and status of the reader. 
+ *
+ * @author Bernd Rinn
+ */
+public interface IHDF5FileLevelReadOnlyHandler
+{
+
+    // /////////////////////
+    // Configuration
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if numeric conversions should be performed automatically, e.g.
+     * between <code>float</code> and <code>int</code>.
+     */
+    public boolean isPerformNumericConversions();
+
+    /**
+     * Returns the suffix used to mark and recognize internal (house keeping) files and groups. An
+     * empty string ("") encodes the default, which is two leading and two trailing underscores
+     * ("__NAME__").
+     */
+    public String getHouseKeepingNameSuffix();
+
+    /**
+     * Returns the HDF5 file that this class is reading.
+     */
+    public File getFile();
+
+    // /////////////////////
+    // Status
+    // /////////////////////
+
+    /**
+     * Closes this object and the file referenced by this object. This object must not be used after
+     * being closed. Calling this method for a second time is a no-op.
+     */
+    public void close();
+
+    /**
+     * Returns <code>true</code> if this reader has already been closed.
+     */
+    public boolean isClosed();
+
+}
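
A short sketch, assuming this handler is reachable via IHDF5Reader.file()
(an assumption; the accessor is not shown in this diff):

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5FileLevelReadOnlyHandler;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;

    public class FileLevelExample
    {
        public static void main(String[] args)
        {
            final IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
            final IHDF5FileLevelReadOnlyHandler file = reader.file();
            System.out.println("Reading: " + file.getFile());
            System.out.println("Numeric conversions: "
                    + file.isPerformNumericConversions());
            file.close();
            // Closing is idempotent; a second close() is a no-op.
            System.out.println("Closed: " + file.isClosed());
        }
    }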
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5FileLevelReadWriteHandler.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5FileLevelReadWriteHandler.java
new file mode 100644
index 0000000..4ab2fb6
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5FileLevelReadWriteHandler.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.Flushable;
+
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * An interface for handling file-level information and status of the writer. 
+ *
+ * @author Bernd Rinn
+ */
+public interface IHDF5FileLevelReadWriteHandler extends IHDF5FileLevelReadOnlyHandler
+{
+
+    // /////////////////////
+    // Configuration
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if the {@link IHDF5WriterConfigurator} was <em>not</em> configured
+     * with {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}, that is if extendable data
+     * types are used for new data sets.
+     */
+    public boolean isUseExtendableDataTypes();
+
+    /**
+     * Returns the {@link FileFormat} compatibility setting for this writer.
+     */
+    public FileFormat getFileFormat();
+
+    // /////////////////////
+    // Flushing and Syncing
+    // /////////////////////
+
+    /**
+     * Flushes the cache to disk (without discarding it). Note that this may or may not trigger a
+     * <code>fsync(2)</code>, depending on the {@link IHDF5WriterConfigurator.SyncMode} used.
+     */
+    public void flush();
+
+    /**
+     * Flushes the cache to disk (without discarding it) and synchronizes the file with the
+     * underlying storage using a method like <code>fsync(2)</code>, regardless of what
+     * {@link IHDF5WriterConfigurator.SyncMode} has been set for this file.
+     * <p>
+     * This method blocks until <code>fsync(2)</code> has returned.
+     */
+    public void flushSyncBlocking();
+
+    /**
+     * Adds a {@link Flushable} to the set of flushables. This set is flushed when {@link #flush()}
+     * or {@link #flushSyncBlocking()} are called and before the writer is closed.
+     * <p>
+     * This function is supposed to be used for in-memory caching structures that need to make it
+     * into the HDF5 file.
+     * <p>
+     * If the <var>flushable</var> implements
+     * {@link ch.systemsx.cisd.base.exceptions.IErrorStrategy}, in case of an exception in
+     * {@link Flushable#flush()}, the method
+     * {@link ch.systemsx.cisd.base.exceptions.IErrorStrategy#dealWithError(Throwable)} will be
+     * called to decide how to deal with the exception.
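+     * <p>
+     * A sketch of registering an in-memory cache (the cache class is hypothetical):
+     * <pre>
+     * MyIndexCache cache = new MyIndexCache(); // implements java.io.Flushable
+     * writer.file().addFlushable(cache);       // now flushed by flush() and on close
+     * </pre>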
+     * 
+     * @param flushable The {@link Flushable} to add. Needs to fulfill the {@link Object#hashCode()}
+     *            contract.
+     * @return <code>true</code> if the set of flushables did not already contain the specified
+     *         element.
+     */
+    public boolean addFlushable(Flushable flushable);
+
+    /**
+     * Removes a {@link Flushable} from the set of flushables.
+     * 
+     * @param flushable The {@link Flushable} to remove. Needs to fulfill the
+     *            {@link Object#hashCode()} contract.
+     * @return <code>true</code> if the set of flushables contained the specified element.
+     */
+    public boolean removeFlushable(Flushable flushable);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5FloatReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5FloatReader.java
new file mode 100644
index 0000000..95dedf8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5FloatReader.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+
+/**
+ * An interface that provides methods for reading <code>float</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>
+ * <i>Note:</i> If the values read are unsigned, use the methods in {@link UnsignedIntUtils} to convert
+ * to a larger Java integer type that can hold all values as unsigned.
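+ * <p>
+ * A sketch of chunk-aligned block reading (path and sizes are illustrative):
+ * <pre>
+ * // data set created with chunk size 10000
+ * float[] block = reader.float32().readArrayBlock("/mydata", 10000, 2); // elements 20000..29999
+ * </pre>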
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5FloatReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>float</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public float getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a <code>float[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public float[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional array <code>float</code> attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MDFloatArray getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads a <code>float</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public float[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>float</code> value from the data set <var>objectPath</var>. This method 
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public float read(String objectPath);
+
+    /**
+     * Reads a <code>float</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public float[] readArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
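+     * <p>
+     * For example (path and sizes illustrative), filling a pre-allocated array starting at
+     * memory position [5, 0]:
+     * <pre>
+     * MDFloatArray target = new MDFloatArray(new int[] { 100, 100 });
+     * int[] filled = reader.float32().readToMDArrayWithOffset("/src", target, new int[] { 5, 0 });
+     * </pre>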
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath, 
+    				MDFloatArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>float</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MDFloatArray array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>float</code> array (of rank 1) from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>float[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public float[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
+
+    /**
+     * Reads a block from a <code>float</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>float[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public float[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public float[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
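+     * <p>
+     * For example (path and sizes illustrative), reading the 100 x 100 block at block index (2, 3):
+     * <pre>
+     * float[][] block = reader.float32().readMatrixBlock("/matrix", 100, 100, 2L, 3L);
+     * // covers rows 200..299 and columns 300..399
+     * </pre>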
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public float[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public float[][] readMatrixBlockWithOffset(String objectPath, 
+    				int blockSizeX, int blockSizeY, long offsetX, long offsetY) 
+    				throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MDFloatArray readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
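+     * <p>
+     * For example (path and dimensions illustrative), for a data set of dimensions
+     * <code>[10, 20, 30]</code>:
+     * <pre>
+     * // bind index 1 to 17: yields a 2-d slice of dimensions [10, 30]
+     * MDFloatArray slice = reader.float32().readMDArraySlice("/cube", new IndexMap().mapTo(1, 17));
+     * </pre>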
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readMDArraySlice(String objectPath, IndexMap boundIndices);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readMDArraySlice(String objectPath, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>float</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readMDArrayBlock(String objectPath,
+    				int[] blockDimensions, long[] blockNumber);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>float</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDFloatArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
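+     * <p>
+     * For example (path and processing are illustrative):
+     * <pre>
+     * for (HDF5DataBlock&lt;float[]&gt; block : reader.float32().getArrayNaturalBlocks("/big"))
+     * {
+     *     process(block.getIndex(), block.getData()); // process() is hypothetical
+     * }
+     * </pre>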
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<float[]>> getArrayNaturalBlocks(
+    									String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDFloatArray>> getMDArrayNaturalBlocks(
+    									String dataSetPath);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5FloatWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5FloatWriter.java
new file mode 100644
index 0000000..a8ab516
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5FloatWriter.java
@@ -0,0 +1,546 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+
+/**
+ * An interface that provides methods for writing <code>float</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
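+ * <p>
+ * A sketch of chunk-aligned block writing (path and sizes are illustrative):
+ * <pre>
+ * writer.float32().createArray("/a", 0L, 4096);      // chunk size 4096
+ * writer.float32().writeArrayBlock("/a", chunk, 0L); // chunk is a float[4096]; one write access
+ * </pre>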
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5FloatWriter extends IHDF5FloatReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>float</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, float value);
+
+    /**
+     * Set a <code>float[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, float[] value);
+
+    /**
+     * Set a multi-dimensional <code>float</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MDFloatArray value);
+
+    /**
+     * Set a <code>float[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, float[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>float</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, float value);
+
+    /**
+     * Writes out a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, float[] data);
+
+    /**
+     * Writes out a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, float[] data, 
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the float array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>float</code> array to create. When <i>requesting</i> a
+     *            chunked data set (e.g. {@link HDF5FloatStorageFeatures#FLOAT_CHUNKED}),
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>.
+     *            When <i>allowing</i> a chunked data set (e.g.
+     *            {@link HDF5FloatStorageFeatures#FLOAT_NO_COMPRESSION} when the writer is
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a
+     *            non-extendable data set (e.g.
+     *            {@link HDF5FloatStorageFeatures#FLOAT_CONTIGUOUS}), the initial size equals
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5FloatStorageFeatures features);
+    
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the float array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *                <code>features</code> is <code>HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>float</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5FloatStorageFeatures)}
+     * beforehand.
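+     * <p>
+     * A sketch of block-wise writing (path, sizes and <code>nextChunk()</code> are illustrative):
+     * <pre>
+     * writer.float32().createArray("/big", 1000000L, 10000, HDF5FloatStorageFeatures.FLOAT_CHUNKED);
+     * for (long i = 0; i &lt; 100; ++i)
+     * {
+     *     writer.float32().writeArrayBlock("/big", nextChunk(), i); // nextChunk() returns float[10000]
+     * }
+     * </pre>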
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, float[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>float</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, float[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, float[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, float[][] data);
+
+    /**
+     * Writes out a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, float[][] data, 
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5FloatStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5FloatStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+    		HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the float matrix to create.
+     * @param sizeY The size of the y dimension of the float matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the float matrix to create.
+     * @param sizeY The size of the y dimension of the float matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>float</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, float[][], long, long)} instead of this
+     * method if the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, float[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>float</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, float[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, float[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>float</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, float[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code>&lt;= data.length</code>)
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code>&lt;= data[0].length</code>)
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, float[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MDFloatArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDFloatArray data,
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>float</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is
+     * called. This method will <i>not</i> create the array.
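+     * <p>
+     * For example (path and data illustrative), writing row 3 of a [10, 20] data set:
+     * <pre>
+     * writer.float32().createMDArray("/m", new int[] { 10, 20 });
+     * MDFloatArray row = new MDFloatArray(rowData, new int[] { 20 }); // rowData: float[20]
+     * writer.float32().writeMDArraySlice("/m", row, new IndexMap().mapTo(0, 3));
+     * </pre>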
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDFloatArray data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>float</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDFloatArray data, long[] boundIndices);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>float</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5FloatStorageFeatures#FLOAT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5FloatStorageFeatures#FLOAT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g.
+     *            {@link HDF5FloatStorageFeatures#FLOAT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDFloatArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>float</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDFloatArray data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>float</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDFloatArray data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set  to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>float</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>float</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDFloatArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5IntReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5IntReader.java
new file mode 100644
index 0000000..c1cebf3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5IntReader.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+
+/**
+ * An interface that provides methods for reading <code>int</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>
+ * <i>Note:</i> If the values read are unsigned, use the methods in {@link UnsignedIntUtils} to convert
+ * to a larger Java integer type that can hold all values as unsigned.
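+ * <p>
+ * A sketch of handling unsigned values (the helper name is assumed from the purpose of
+ * {@link UnsignedIntUtils}; check the actual method set):
+ * <pre>
+ * int raw = reader.int32().read("/counter");   // may be negative if stored as uint32
+ * long value = UnsignedIntUtils.toUint32(raw); // non-negative value in the wider type
+ * </pre>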
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5IntReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads an <code>int</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public int getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads an <code>int[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public int[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional array <code>int</code> attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MDIntArray getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads an <code>int</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public int[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads an <code>int</code> value from the data set <var>objectPath</var>. This method
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public int read(String objectPath);
+
+    /**
+     * Reads an <code>int</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public int[] readArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath, 
+    				MDIntArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>int</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MDIntArray array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from an <code>int</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>int[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public int[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
+
+    /**
+     * Reads a block from an <code>int</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>int[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public int[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads an <code>int</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public int[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>int</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public int[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>int</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public int[][] readMatrixBlockWithOffset(String objectPath, 
+            int blockSizeX, int blockSizeY, long offsetX, long offsetY) 
+            throws HDF5JavaException;
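+
+    // Usage sketch (illustrative): reading a 100x200 tile of a rank-2 data set,
+    // first by block number, then by the equivalent explicit offset.
+    //
+    //   int[][] tile = reader.int32().readMatrixBlock("/image", 100, 200, 3L, 1L);
+    //   int[][] same = reader.int32().readMatrixBlockWithOffset("/image", 100, 200, 300L, 200L);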
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MDIntArray readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readMDArraySlice(String objectPath, IndexMap boundIndices);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readMDArraySlice(String objectPath, long[] boundIndices);
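+
+    // Usage sketch: both slice variants below are equivalent; they fix index 2
+    // to 5 and index 4 to 7 of a rank-6 data set (path illustrative) and return
+    // a rank-4 array of the free indices.
+    //
+    //   MDIntArray s1 = reader.int32().readMDArraySlice("/cube",
+    //           new IndexMap().mapTo(2, 5).mapTo(4, 7));
+    //   MDIntArray s2 = reader.int32().readMDArraySlice("/cube",
+    //           new long[] { -1, -1, 5, -1, 7, -1 });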
+
+    /**
+     * Reads a block from a multi-dimensional <code>int</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readMDArrayBlock(String objectPath,
+            int[] blockDimensions, long[] blockNumber);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>int</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDIntArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<int[]>> getArrayNaturalBlocks(
+            String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDIntArray>> getMDArrayNaturalBlocks(
+            String dataSetPath);
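+
+    // Usage sketch: iterating over all natural (chunk-sized) blocks of a rank-1
+    // data set; each block carries its data, index, and offset. process() is a
+    // hypothetical consumer.
+    //
+    //   for (HDF5DataBlock<int[]> block : reader.int32().getArrayNaturalBlocks("/data/counts"))
+    //   {
+    //       process(block.getData(), block.getIndex(), block.getOffset());
+    //   }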
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5IntWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5IntWriter.java
new file mode 100644
index 0000000..493dade
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5IntWriter.java
@@ -0,0 +1,586 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+
+/**
+ * An interface that provides methods for writing <code>int</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of 
+ * block access) to arrays. The performance of this block access can vary greatly depending on how 
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should 
+ * be chosen equal to the chunk dimensions of the array, as in this case the blocks written / read 
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>   
+ * <i>Note:</i> If you need to convert from and to unsigned values, use the methods of 
+ * {@link UnsignedIntUtils}.
+ * 
+ * @author Bernd Rinn
+ */
+ // Note: As a trick for keeping backward compatibility, this interface extends 
+ // IHDF5UnsignedIntWriter instead of IHDF5IntReader, which it logically should extend.
+ // Once we remove IHDF5UnsignedIntWriter, uncomment the following line, remove
+ // all @Override annotations, and we are fine again.
+//public interface IHDF5IntWriter extends IHDF5IntReader
+@SuppressWarnings("deprecation")
+public interface IHDF5IntWriter extends IHDF5UnsignedIntWriter
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>int</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setAttr(String objectPath, String name, int value);
+
+    /**
+     * Set a <code>int[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setArrayAttr(String objectPath, String name, int[] value);
+
+    /**
+     * Set a multi-dimensional <code>int</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMDArrayAttr(String objectPath, String name, MDIntArray value);
+
+    /**
+     * Set a <code>int[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMatrixAttr(String objectPath, String name, int[][] value);
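+
+    // Usage sketch (illustrative paths and values): setting attributes on an
+    // existing object. Obtaining this interface via writer.int32() assumes the
+    // IHDF5Writer facade of JHDF5.
+    //
+    //   IHDF5Writer writer = HDF5Factory.open("example.h5");
+    //   writer.int32().setAttr("/data/counts", "version", 3);
+    //   writer.int32().setArrayAttr("/data/counts", "histogram", new int[] { 1, 2, 3 });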
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>int</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    @Override
+    public void write(String objectPath, int value);
+
+    /**
+     * Writes out a <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    @Override
+    public void writeArray(String objectPath, int[] data);
+
+    /**
+     * Writes out a <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeArray(String objectPath, int[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    @Override
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>int</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *          <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>int</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    @Override
+    public void writeArrayBlock(String objectPath, int[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>int</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, int[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    @Override
+    public void writeArrayBlockWithOffset(String objectPath, int[] data,
+            int dataSize, long offset);
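+
+    // Usage sketch of the create-then-write-blocks pattern described above
+    // (path and sizes illustrative, fill() hypothetical): 10000 ints written
+    // in 9 full blocks of 1024 plus one partial block written by offset.
+    //
+    //   writer.int32().createArray("/data/big", 10000L, 1024,
+    //           HDF5IntStorageFeatures.INT_CHUNKED);
+    //   int[] chunk = new int[1024];
+    //   for (long i = 0; i < 9; ++i)
+    //   {
+    //       fill(chunk, i);
+    //       writer.int32().writeArrayBlock("/data/big", chunk, i);
+    //   }
+    //   writer.int32().writeArrayBlockWithOffset("/data/big", chunk, 10000 - 9 * 1024, 9 * 1024L);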
+
+    /**
+     * Writes out a <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(String objectPath, int[][] data);
+
+    /**
+     * Writes out a <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMatrix(String objectPath, int[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the int matrix to create.
+     * @param sizeY The size of the y dimension of the int matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the int matrix to create.
+     * @param sizeY The size of the y dimension of the int matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>int</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, int[][], long, long)} instead of this
+     * method if the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    @Override
+    public void writeMatrixBlock(String objectPath, int[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>int</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, int[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, int[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>int</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, int[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, int[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
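+
+    // Usage sketch (illustrative): writing the bottom edge tile of a 250x300
+    // matrix that was created with 100x100 blocks; at offset (200, 0) only
+    // 50 rows remain, so the tile is clipped to 50x100.
+    //
+    //   writer.int32().createMatrix("/image", 250L, 300L, 100, 100);
+    //   int[][] tile = new int[100][100];
+    //   writer.int32().writeMatrixBlockWithOffset("/image", tile, 50, 100, 200L, 0L);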
+
+    /**
+     * Writes out a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDIntArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDIntArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>int</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDIntArray data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>int</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDIntArray data, long[] boundIndices);
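+
+    // Usage sketch: the two slice variants below are equivalent; both place the
+    // rank-2 "data" object into the rank-4 data set "/cube4" (which must exist,
+    // see the note above) with indices 1 and 2 bound to 3 and 0. Path and
+    // dimensions are illustrative.
+    //
+    //   MDIntArray data = new MDIntArray(new int[] { 10, 10 });
+    //   writer.int32().writeMDArraySlice("/cube4", data, new IndexMap().mapTo(1, 3).mapTo(2, 0));
+    //   writer.int32().writeMDArraySlice("/cube4", data, new long[] { -1, 3, 0, -1 });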
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the dimensions and will be <var>dimensions</var>.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>int</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    @Override
+    public void writeMDArrayBlock(String objectPath, MDIntArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>int</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDIntArray data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>int</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDIntArray data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>int</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>int</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
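+
+    // Usage sketch (illustrative): writing only a 2x3 sub-block of an in-memory
+    // 5x5 array, taken from memory offset (1, 1), to offset (10, 20) of the
+    // data set "/grid".
+    //
+    //   MDIntArray mem = new MDIntArray(new int[] { 5, 5 });
+    //   writer.int32().writeMDArrayBlockWithOffset("/grid", mem,
+    //           new int[] { 2, 3 }, new long[] { 10L, 20L }, new int[] { 1, 1 });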
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5LegacyReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5LegacyReader.java
new file mode 100644
index 0000000..4f7f4bf
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5LegacyReader.java
@@ -0,0 +1,3342 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.util.BitSet;
+import java.util.Date;
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+
+/**
+ * The legacy interface for reading HDF5 files. Do not use in any new code as it will be removed in
+ * a future version of JHDF5.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5LegacyReader extends IHDF5EnumBasicReader, IHDF5CompoundBasicReader
+{
+    // *********************
+    // File level
+    // *********************
+
+    // /////////////////////
+    // Configuration
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if numeric conversions should be performed automatically, e.g.
+     * between <code>float</code> and <code>int</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#file()} instead.
+     */
+    @Deprecated
+    public boolean isPerformNumericConversions();
+
+    /**
+     * Returns the suffix used to mark and recognize internal (house keeping) files and groups. An
+     * empty string ("") encodes for the default, which is two leading and two trailing underscores
+     * ("__NAME__")
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#file()} instead.
+     */
+    @Deprecated
+    public String getHouseKeepingNameSuffix();
+
+    /**
+     * Returns the HDF5 file that this class is reading.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#file()} instead.
+     */
+    @Deprecated
+    public File getFile();
+
+    // /////////////////////
+    // Status
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code> if this reader has already been closed.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#file()} instead.
+     */
+    @Deprecated
+    public boolean isClosed();
+
+    // ***********************
+    // Objects, Links, Groups
+    // ***********************
+
+    // /////////////////////
+    // Objects & Links
+    // /////////////////////
+
+    /**
+     * Returns the link information for the given <var>objectPath</var>. If <var>objectPath</var>
+     * does not exist, the link information will have a type {@link HDF5ObjectType#NONEXISTENT}.
+     */
+    public HDF5LinkInformation getLinkInformation(final String objectPath);
+
+    /**
+     * Returns the object information for the given <var>objectPath</var>. If <var>objectPath</var>
+     * is a symbolic link, this method will return the type of the object that this link points to
+     * rather than the type of the link. If <var>objectPath</var> does not exist, the object
+     * information will have a type {@link HDF5ObjectType#NONEXISTENT} and the other fields will not
+     * be set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5ObjectInformation getObjectInformation(final String objectPath);
+
+    /**
+     * Returns the type of the given <var>objectPath</var>. If <var>followLink</var> is
+     * <code>false</code> and <var>objectPath</var> is a symbolic link, this method will return the
+     * type of the link rather than the type of the object that the link points to.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5ObjectType getObjectType(final String objectPath, boolean followLink);
+
+    /**
+     * Returns the type of the given <var>objectPath</var>. If <var>objectPath</var> is a symbolic
+     * link, this method will return the type of the object that this link points to rather than the
+     * type of the link, that is, it will follow symbolic links.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5ObjectType getObjectType(final String objectPath);
+
+    /**
+     * Returns <code>true</code>, if <var>objectPath</var> exists and <code>false</code> otherwise.
+     * If <var>followLink</var> is <code>false</code> and <var>objectPath</var> is a symbolic link,
+     * this method will return <code>true</code> regardless of whether the link target exists or
+     * not.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean exists(final String objectPath, boolean followLink);
+
+    /**
+     * Creates and returns an internal (house-keeping) version of <var>objectPath</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public String toHouseKeepingPath(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if <var>objectPath</var> denotes an internal (house-keeping)
+     * object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isHouseKeepingObject(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a group and
+     * <code>false</code> otherwise. Note that if <var>followLink</var> is <code>false</code> this
+     * method will return <code>false</code> if <var>objectPath</var> is a symbolic link that points
+     * to a group.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isGroup(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data set and
+     * <code>false</code> otherwise. Note that if <var>followLink</var> is <code>false</code> this
+     * method will return <code>false</code> if <var>objectPath</var> is a symbolic link that points
+     * to a data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isDataSet(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data set and
+     * <code>false</code> otherwise. Note that if <var>objectPath</var> is a symbolic link, this
+     * method will return <code>true</code> if the link target of the symbolic link is a data set,
+     * that is, this method will follow symbolic links.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isDataSet(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data type and
+     * <code>false</code> otherwise. Note that if <var>followLink</var> is <code>false</code> this
+     * method will return <code>false</code> if <var>objectPath</var> is a symbolic link that points
+     * to a data type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isDataType(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data type and
+     * <code>false</code> otherwise. Note that if <var>objectPath</var> is a symbolic link, this
+     * method will return <code>true</code> if the link target of the symbolic link is a data type,
+     * that is, this method will follow symbolic links.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isDataType(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a soft link and
+     * <code>false</code> otherwise.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isSoftLink(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents an external link
+     * and <code>false</code> otherwise.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isExternalLink(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents either a soft
+     * link or an external link and <code>false</code> otherwise.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public boolean isSymbolicLink(final String objectPath);
+
+    /**
+     * Returns the target of the symbolic link that <var>objectPath</var> points to, or
+     * <code>null</code>, if <var>objectPath</var> is not a symbolic link.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public String tryGetSymbolicLinkTarget(final String objectPath);
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if the <var>objectPath</var> has an attribute with name
+     * <var>attributeName</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return <code>true</code>, if the attribute exists for the object.
+     */
+    public boolean hasAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Returns the names of the attributes of the given <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object (data set or group) to
+     *            return the attributes for.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public List<String> getAttributeNames(final String objectPath);
+
+    /**
+     * Returns the names of all attributes of the given <var>objectPath</var>.
+     * <p>
+     * This may include attributes that are used internally by the library and are not supposed to
+     * be changed by application programmers.
+     * 
+     * @param objectPath The name (including path information) of the object (data set or group) to
+     *            return the attributes for.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public List<String> getAllAttributeNames(final String objectPath);
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataTypeInformation} object.
+     * 
+     * @param objectPath The name (including path information) of the object that has the attribute
+     *            to return information about.
+     * @param attributeName The name of the attribute to get information about.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5DataTypeInformation getAttributeInformation(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataTypeInformation} object.
+     * 
+     * @param objectPath The name (including path information) of the object that has the attribute
+     *            to return information about.
+     * @param attributeName The name of the attribute to get information about.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5DataTypeInformation getAttributeInformation(final String objectPath,
+            final String attributeName, final DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataSetInformation} object. It is a
+     * failure condition if the <var>dataSetPath</var> does not exist or does not identify a data
+     * set.
+     * 
+     * @param dataSetPath The name (including path information) of the data set to return
+     *            information about.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5DataSetInformation getDataSetInformation(final String dataSetPath,
+            final DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the total size (in bytes) of <var>objectPath</var>. It is a failure condition if the
+     * <var>dataSetPath</var> does not exist or does not identify a data set. This method follows
+     * symbolic links.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public long getSize(final String objectPath);
+
+    /**
+     * Returns the total number of elements of <var>objectPath</var>. It is a failure condition if
+     * the <var>dataSetPath</var> does not exist or does not identify a data set. This method
+     * follows symbolic links.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public long getNumberOfElements(final String objectPath);
+
+    /**
+     * Copies the <var>sourceObject</var> to the <var>destinationObject</var> of the HDF5 file
+     * represented by the <var>destinationWriter</var>. If <var>destinationObject</var> ends with
+     * "/", it will be considered a group and the name of <var>sourceObject</var> will be appended.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public void copy(String sourceObject, IHDF5Writer destinationWriter, String destinationObject);
+
+    /**
+     * Copies the <var>sourceObject</var> to the root group of the HDF5 file represented by the
+     * <var>destinationWriter</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public void copy(String sourceObject, IHDF5Writer destinationWriter);
+
+    /**
+     * Copies all objects of the file represented by this reader to the root group of the HDF5 file
+     * represented by the <var>destinationWriter</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public void copyAll(IHDF5Writer destinationWriter);
+
+    // /////////////////////
+    // Group
+    // /////////////////////
+
+    /**
+     * Returns all members of <var>groupPath</var>, including internal groups that may be used by
+     * the library to do house-keeping. The order is <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public List<String> getAllGroupMembers(final String groupPath);
+
+    /**
+     * Returns the paths of the members of <var>groupPath</var> (including the parent). The order is
+     * <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the member paths for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public List<String> getGroupMemberPaths(final String groupPath);
+
+    /**
+     * Returns the link information about the members of <var>groupPath</var>. The order is
+     * <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @param readLinkTargets If <code>true</code>, for symbolic links the link targets will be
+     *            available via {@link HDF5LinkInformation#tryGetSymbolicLinkTarget()}.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public List<HDF5LinkInformation> getGroupMemberInformation(final String groupPath,
+            boolean readLinkTargets);
+
+    /**
+     * Returns the link information about all members of <var>groupPath</var>. The order is
+     * <i>not</i> well defined.
+     * <p>
+     * This may include members that are used internally by the library and are not supposed to
+     * be changed by application programmers.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @param readLinkTargets If <code>true</code>, the link targets will be read for symbolic
+     *            links.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public List<HDF5LinkInformation> getAllGroupMemberInformation(final String groupPath,
+            boolean readLinkTargets);
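+
+    /*
+     * A short sketch (editor's addition) of listing group members through the object() handler.
+     * The handler method name mirrors the deprecated one above and is an assumption to that
+     * extent; getPath() on HDF5LinkInformation is likewise assumed.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   for (HDF5LinkInformation link : reader.object().getGroupMemberInformation("/mygroup", true))
+     *   {
+     *       System.out.println(link.getPath() + " -> " + link.tryGetSymbolicLinkTarget());
+     *   }
+     *   reader.close();
+     */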
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns the data type variant of <var>objectPath</var>, or <code>null</code>, if no type
+     * variant is defined for this <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data type variant or <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5DataTypeVariant tryGetTypeVariant(final String objectPath);
+
+    /**
+     * Returns the data type variant of <var>attributeName</var> of object <var>objectPath</var>,
+     * or <code>null</code>, if no type variant is defined for this attribute.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to get the type variant for.
+     * @return The data type variant or <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public HDF5DataTypeVariant tryGetTypeVariant(final String objectPath, String attributeName);
+
+    /**
+     * Returns the path of the data type of the data set <var>objectPath</var>, or
+     * <code>null</code>, if this data set is not of a named data type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public String tryGetDataTypePath(final String objectPath);
+
+    /**
+     * Returns the path of the data <var>type</var>, or <code>null</code>, if <var>type</var> is not
+     * a named data type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#object()} instead.
+     */
+    @Deprecated
+    public String tryGetDataTypePath(HDF5DataType type);
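+
+    /*
+     * A hedged sketch (editor's addition): probing type variants via the object() handler. The
+     * "try..." methods return null instead of throwing when nothing is defined, so the null
+     * check is the intended usage pattern; the handler method name is assumed from the notes.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   HDF5DataTypeVariant variantOrNull = reader.object().tryGetTypeVariant("/mygroup/stamp");
+     *   if (variantOrNull != null)
+     *   {
+     *       System.out.println("Type variant: " + variantOrNull);
+     *   }
+     *   reader.close();
+     */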
+
+    // *********************
+    // Opaque
+    // *********************
+
+    /**
+     * Returns the tag of the opaque data type associated with <var>objectPath</var>, or
+     * <code>null</code>, if <var>objectPath</var> is not of an opaque data type (i.e. if
+     * <code>reader.getDataSetInformation(objectPath).getTypeInformation().getDataClass() != HDF5DataClass.OPAQUE</code>
+     * ).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The tag of the opaque data type, or <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public String tryGetOpaqueTag(final String objectPath);
+
+    /**
+     * Returns the opaque data type or <code>null</code>, if <var>objectPath</var> is not of such a
+     * data type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The opaque data type, or <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public HDF5OpaqueType tryGetOpaqueType(final String objectPath);
+
+    /**
+     * Gets the (unchanged) byte array values of an attribute <var>attributeName</var> of object
+     * <var>objectPath</var>.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public byte[] getAttributeAsByteArray(final String objectPath, final String attributeName);
+
+    /**
+     * Reads the data set <var>objectPath</var> as a byte array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public byte[] readAsByteArray(final String objectPath);
+
+    /**
+     * Reads a block from the data set <var>objectPath</var> as a byte array (of rank 1).
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size in numbers of elements (this will be the length of the
+     *            <code>byte[]</code> returned, divided by the size of one element).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set is not of rank 1 or is a String.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public byte[] readAsByteArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block from the data set <var>objectPath</var> as a byte array (of rank 1).
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size in numbers of elements (this will be the length of the
+     *            <code>byte[]</code> returned, divided by the size of one element).
+     * @param offset The offset of the block to read as number of elements (starting with 0).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public byte[] readAsByteArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset) throws HDF5JavaException;
+
+    /**
+     * Reads a block from the data set <var>objectPath</var> as a byte array (of rank 1) into
+     * <var>buffer</var>.
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param buffer The buffer to read the values into.
+     * @param blockSize The block size in numbers of elements (the maximum number of elements
+     *            read into <var>buffer</var>).
+     * @param offset The offset of the block in the data set as number of elements (zero-based).
+     * @param memoryOffset The offset of the block in <var>buffer</var> as number of elements
+     *            (zero-based).
+     * @return The effective block size.
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public int readAsByteArrayToBlockWithOffset(final String objectPath, final byte[] buffer,
+            final int blockSize, final long offset, final int memoryOffset)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#opaque()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<byte[]>> getAsByteArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
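+
+    /*
+     * A minimal sketch (editor's addition) of reading opaque data through the opaque() handler
+     * named in the deprecation notes. The handler method names (tryGetOpaqueTag, readArray) are
+     * assumptions mirroring the deprecated signatures above.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   if (reader.opaque().tryGetOpaqueTag("/blobs/blob0") != null)
+     *   {
+     *       byte[] raw = reader.opaque().readArray("/blobs/blob0");
+     *   }
+     *   reader.close();
+     */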
+
+    // *********************
+    // Boolean
+    // *********************
+
+    /**
+     * Reads a <code>boolean</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @throws HDF5JavaException If the attribute is not a boolean type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#bool()}.
+     */
+    @Deprecated
+    public boolean getBooleanAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>boolean</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a boolean type.
+     */
+    public boolean readBoolean(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a bit field (which can be considered the equivalent of a boolean array of rank 1) from
+     * the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet readBitField(final String objectPath) throws HDF5DatatypeInterfaceException;
+
+    /**
+     * Reads a block of a bit field (which can be considered the equivalent of a boolean array of
+     * rank 1) from the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block (in 64 bit words) to read.
+     * @param blockNumber The number of the block to read.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#bool()}.
+     */
+    @Deprecated
+    public BitSet readBitFieldBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a bit field (which can be considered the equivalent of a boolean array of
+     * rank 1) from the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block (in 64 bit words) to read.
+     * @param offset The offset of the block (in 64 bit words) to start reading from.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#bool()}.
+     */
+    @Deprecated
+    public BitSet readBitFieldBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Returns <code>true</code> if the <var>bitIndex</var> of the bit field dataset
+     * <var>objectPath</var> is set, <code>false</code> otherwise.
+     * <p>
+     * Will also return <code>false</code> if <var>bitIndex</var> is outside of the bit field
+     * dataset.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#bool()}.
+     */
+    @Deprecated
+    public boolean isBitSetInBitField(final String objectPath, final int bitIndex);
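+
+    /*
+     * A hedged sketch (editor's addition) for the bool() handler. Note the caveat above: a bit
+     * field is stored as long[] but marked to be interpreted bit-wise, so a plain long array
+     * cannot be read back this way. The handler method name mirrors readBitField and is an
+     * assumption to that extent.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   BitSet flags = reader.bool().readBitField("/flags");
+     *   boolean third = flags.get(2);  // zero-based bit index
+     *   reader.close();
+     */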
+
+    // *********************
+    // Byte
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>byte</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte getByteAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>byte[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[] getByteArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public MDByteArray getByteMDArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>byte</code> matrix attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[][] getByteMatrixAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>byte</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte readByte(final String objectPath);
+
+    /**
+     * Reads a <code>byte</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[] readByteArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array data set <var>objectPath</var> into a given
+     * <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public int[] readToByteMDArrayWithOffset(final String objectPath, final MDByteArray array,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>byte</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public int[] readToByteMDArrayBlockWithOffset(final String objectPath, final MDByteArray array,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>byte</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>byte[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[] readByteArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block from a <code>byte</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>byte[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[] readByteArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a <code>byte</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[][] readByteMatrix(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>byte</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[][] readByteMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>byte</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public byte[][] readByteMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public MDByteArray readByteMDArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public MDByteArray readByteMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional <code>byte</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public MDByteArray readByteMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<byte[]>> getByteArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int8()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDByteArray>> getByteMDArrayNaturalBlocks(
+            final String dataSetPath);
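+
+    /*
+     * A short sketch (editor's addition) of whole-array and block-wise byte reads via the
+     * int8() handler; the handler method names (readArray, getArrayNaturalBlocks) are
+     * assumptions mirroring the deprecated names above, and process(...) is a hypothetical
+     * helper.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   byte[] whole = reader.int8().readArray("/bytes");
+     *   for (HDF5DataBlock<byte[]> block : reader.int8().getArrayNaturalBlocks("/bytes"))
+     *   {
+     *       process(block.getData(), block.getOffset());  // hypothetical helper
+     *   }
+     *   reader.close();
+     */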
+
+    // *********************
+    // Short
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>short</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short getShortAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>short[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[] getShortArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public MDShortArray getShortMDArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>short</code> matrix attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[][] getShortMatrixAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>short</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short readShort(final String objectPath);
+
+    /**
+     * Reads a <code>short</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[] readShortArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array data set <var>objectPath</var> into a
+     * given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public int[] readToShortMDArrayWithOffset(final String objectPath, final MDShortArray array,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>short</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public int[] readToShortMDArrayBlockWithOffset(final String objectPath,
+            final MDShortArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>short</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>short[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[] readShortArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block from a <code>short</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>short[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[] readShortArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a <code>short</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[][] readShortMatrix(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>short</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[][] readShortMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>short</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public short[][] readShortMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public MDShortArray readShortMDArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public MDShortArray readShortMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public MDShortArray readShortMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<short[]>> getShortArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int16()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDShortArray>> getShortMDArrayNaturalBlocks(
+            final String dataSetPath);
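+
+    /*
+     * A hedged sketch (editor's addition): reading one block of a rank-1 short data set via the
+     * int16() handler. As documented above, block number 2 with block size 1024 starts at
+     * element offset 2048; the handler method name is an assumption.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   short[] block = reader.int16().readArrayBlock("/shorts", 1024, 2L);
+     *   reader.close();
+     */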
+
+    // *********************
+    // Int
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads an <code>int</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int getIntAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads an <code>int[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[] getIntArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public MDIntArray getIntMDArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads an <code>int</code> matrix attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[][] getIntMatrixAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads an <code>int</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public int readInt(final String objectPath);
+
+    /**
+     * Reads an <code>int</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public int[] readIntArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array data set <var>objectPath</var> into a given
+     * <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[] readToIntMDArrayWithOffset(final String objectPath, final MDIntArray array,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>int</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[] readToIntMDArrayBlockWithOffset(final String objectPath, final MDIntArray array,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
+    /**
+     * Reads a block from an <code>int</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>int[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[] readIntArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block from an <code>int</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>int[]</code> returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[] readIntArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads an <code>int</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public int[][] readIntMatrix(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>int</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[][] readIntMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>int</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public int[][] readIntMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public MDIntArray readIntMDArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public MDIntArray readIntMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional <code>int</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public MDIntArray readIntMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<int[]>> getIntArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int32()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDIntArray>> getIntMDArrayNaturalBlocks(final String dataSetPath);
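+
+    /*
+     * A minimal sketch (editor's addition) of a multi-dimensional block read via the int32()
+     * handler; the method name is an assumption mirroring readIntMDArrayBlock above.
+     *
+     *   IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
+     *   // Block (1, 0) of a 2D data set, 16x16 elements per block, i.e. offset (16, 0):
+     *   MDIntArray block = reader.int32().readMDArrayBlock("/ints2d",
+     *           new int[] { 16, 16 }, new long[] { 1, 0 });
+     *   reader.close();
+     */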
+
+    // *********************
+    // Long
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>long</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long getLongAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>long[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long[] getLongArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public MDLongArray getLongMDArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>long</code> matrix attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long[][] getLongMatrixAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>long</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public long readLong(final String objectPath);
+
+    /**
+     * Reads a <code>long</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public long[] readLongArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array data set <var>objectPath</var> into a given
+     * <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public int[] readToLongMDArrayWithOffset(final String objectPath, final MDLongArray array,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>long</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public int[] readToLongMDArrayBlockWithOffset(final String objectPath, final MDLongArray array,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>long</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long[] readLongArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a <code>long</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long[] readLongArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
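+
+    // Sketch of the two rank-1 block read variants (path is hypothetical). Both calls
+    // read elements 100..149: block number 2 of size 50 starts at offset 2 * 50 = 100:
+    //
+    //   long[] byNumber = reader.readLongArrayBlock("/experiment/counts", 50, 2);
+    //   long[] byOffset = reader.readLongArrayBlockWithOffset("/experiment/counts", 50, 100);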
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public long[][] readLongMatrix(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply by
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply by
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long[][] readLongMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public long[][] readLongMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException;
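+
+    // Sketch of reading one tile of a rank-2 data set (path and sizes hypothetical).
+    // Block (1, 2) of a 64x64 tiling starts at offsets (64, 128), so both calls
+    // below return the same tile:
+    //
+    //   long[][] tileA = reader.readLongMatrixBlock("/experiment/image", 64, 64, 1, 2);
+    //   long[][] tileB = reader.readLongMatrixBlockWithOffset("/experiment/image", 64, 64, 64, 128);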
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public MDLongArray readLongMDArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply by the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public MDLongArray readLongMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public MDLongArray readLongMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
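+
+    // Sketch of a multi-dimensional block read (path and geometry hypothetical).
+    // Reads the 8x8x8 block with block number (1, 0, 0), i.e. starting at offset
+    // (8, 0, 0); element access via MDLongArray.get() is an assumption:
+    //
+    //   MDLongArray block = reader.readLongMDArrayBlock("/experiment/volume",
+    //           new int[] { 8, 8, 8 }, new long[] { 1, 0, 0 });
+    //   long v = block.get(0, 0, 0);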
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<long[]>> getLongArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#int64()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getLongMDArrayNaturalBlocks(
+            final String dataSetPath);
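+
+    // Sketch of iterating over natural blocks (the getData()/getOffset() accessors of
+    // HDF5DataBlock and the data set path are assumptions):
+    //
+    //   for (HDF5DataBlock<long[]> block : reader.getLongArrayNaturalBlocks("/experiment/counts"))
+    //   {
+    //       long[] data = block.getData();
+    //       long offset = block.getOffset();
+    //       // ... process data starting at offset ...
+    //   }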
+
+    // *********************
+    // Float
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>float</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float getFloatAttribute(final String objectPath, final String attributeName);
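+
+    // Sketch of reading a scalar attribute (object and attribute names hypothetical):
+    //
+    //   float scale = reader.getFloatAttribute("/experiment/image", "scale");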
+
+    /**
+     * Reads a <code>float[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float[] getFloatArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public MDFloatArray getFloatMDArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>float</code> matrix attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float[][] getFloatMatrixAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>float</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public float readFloat(final String objectPath);
+
+    /**
+     * Reads a <code>float</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public float[] readFloatArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array data set <var>objectPath</var> into a
+     * given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public int[] readToFloatMDArrayWithOffset(final String objectPath, final MDFloatArray array,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>float</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public int[] readToFloatMDArrayBlockWithOffset(final String objectPath,
+            final MDFloatArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of a <code>float</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>float[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply by
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float[] readFloatArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a <code>float</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>float[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float[] readFloatArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public float[][] readFloatMatrix(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply by
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply by
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float[][] readFloatMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public float[][] readFloatMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public MDFloatArray readFloatMDArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply by the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public MDFloatArray readFloatMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional <code>float</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public MDFloatArray readFloatMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<float[]>> getFloatArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float32()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDFloatArray>> getFloatMDArrayNaturalBlocks(
+            final String dataSetPath);
+
+    // *********************
+    // Double
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>double</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double getDoubleAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a <code>double[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double[] getDoubleArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public MDDoubleArray getDoubleMDArrayAttribute(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Reads a <code>double</code> matrix attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double[][] getDoubleMatrixAttribute(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>double</code> value from the data set <var>objectPath</var>. This method
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public double readDouble(final String objectPath);
+
+    /**
+     * Reads a <code>double</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public double[] readDoubleArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array data set <var>objectPath</var> into a
+     * given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public int[] readToDoubleMDArrayWithOffset(final String objectPath, final MDDoubleArray array,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>double</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public int[] readToDoubleMDArrayBlockWithOffset(final String objectPath,
+            final MDDoubleArray array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset);
+
+    /**
+     * Reads a block of a <code>double</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>double[]</code>
+     *            returned if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply by
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double[] readDoubleArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a <code>double</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>double[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double[] readDoubleArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public double[][] readDoubleMatrix(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply by
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply by
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double[][] readDoubleMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public double[][] readDoubleMatrixBlockWithOffset(final String objectPath,
+            final int blockSizeX, final int blockSizeY, final long offsetX, final long offsetY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public MDDoubleArray readDoubleMDArray(final String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply by the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public MDDoubleArray readDoubleMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional <code>double</code> array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public MDDoubleArray readDoubleMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<double[]>> getDoubleArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#float64()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDDoubleArray>> getDoubleMDArrayNaturalBlocks(
+            final String dataSetPath);
+
+    // *********************
+    // String
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a string attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public String getStringAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a string array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public String[] getStringArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional string array attribute named <var>attributeName</var> from the
+     * object <var>objectPath</var>. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public MDArray<String> getStringMDArrayAttribute(final String objectPath,
+            final String attributeName);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a string from the data set <var>objectPath</var>. Considers '\0' as end of string. The
+     * data set needs to be of a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String readString(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a string array (of rank 1) from the data set <var>objectPath</var>. The elements of
+     * this data set need to be of a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readStringArray(final String objectPath) throws HDF5JavaException;
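+
+    // Sketch of reading string data (paths hypothetical); both calls throw an
+    // HDF5JavaException if the data set is not of a string type:
+    //
+    //   String comment = reader.readString("/experiment/comment");
+    //   String[] labels = reader.readStringArray("/experiment/labels");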
+
+    /**
+     * Reads a block of a string array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be of a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block to read from the data set.
+     * @param blockNumber The number of the block to read from the data set (the offset is
+     *            <code>blockSize * blockNumber</code>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public String[] readStringArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a string array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be of a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block to read from the data set.
+     * @param offset The offset of the block in the data set.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public String[] readStringArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a string array (of rank N) from the data set <var>objectPath</var>. The elements of
+     * this data set need to be of a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public MDArray<String> readStringMDArray(final String objectPath);
+
+    /**
+     * Reads a block of a string array (of rank N) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be of a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The dimensions (along each axis) of the block to read from the data
+     *            set.
+     * @param blockNumber The number of the block to read from the data set (the offset in each
+     *            dimension i is <code>blockDimensions[i] * blockNumber[i]</code>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public MDArray<String> readStringMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber);
+
+    /**
+     * Reads a block of a string array (of rank N) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be of a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The dimensions (along each axis) of the block to read from the data
+     *            set.
+     * @param offset The offset of the block in the data set.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public MDArray<String> readStringMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional string data set to iterate over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<String[]>> getStringArrayNaturalBlocks(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional string data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#string()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getStringMDArrayNaturalBlocks(
+            final String objectPath);
+
+    // *********************
+    // Date & Time
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code> if the attribute <var>attributeName</var> of the data set
+     * <var>objectPath</var> is a time stamp, and <code>false</code> otherwise.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public boolean isTimeStamp(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp as a number of milliseconds since January 1, 1970, 00:00:00 GMT.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public long getTimeStampAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a time stamp array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp array; each element is a number of milliseconds since January 1, 1970,
+     *         00:00:00 GMT.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public long[] getTimeStampArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a time stamp attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var> and returns it as a <code>Date</code>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamp as a <code>Date</code>.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public Date getDateAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a time stamp array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var> and returns it as a <code>Date[]</code>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time stamps as a <code>Date[]</code>.
+     * @throws HDF5JavaException If the attribute <var>attributeName</var> of object
+     *             <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public Date[] getDateArrayAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Returns <code>true</code> if the attribute <var>attributeName</var> of the data set
+     * <var>objectPath</var> is a time duration, and <code>false</code> otherwise.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public boolean isTimeDuration(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time duration attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time duration.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public HDF5TimeDuration getTimeDurationAttribute(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Reads a time duration array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time duration.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public HDF5TimeDurationArray getTimeDurationArrayAttribute(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Returns the time unit if the attribute given by <var>attributeName</var> of object
+     * <var>objectPath</var> is a time duration, and <code>null</code> otherwise.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public HDF5TimeUnit tryGetTimeUnit(final String objectPath, final String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code> if the data set given by <var>objectPath</var> is a time stamp,
+     * and <code>false</code> otherwise.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public boolean isTimeStamp(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Returns <code>true</code> if the data set given by <var>objectPath</var> is a time
+     * duration, and <code>false</code> otherwise.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public boolean isTimeDuration(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Returns the time unit if the data set given by <var>objectPath</var> is a time duration,
+     * and <code>null</code> otherwise.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public HDF5TimeUnit tryGetTimeUnit(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp value from the data set <var>objectPath</var>. The time stamp is stored as
+     * a <code>long</code> value in the HDF5 file. It needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp as a number of milliseconds since January 1, 1970, 00:00:00 GMT.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public long readTimeStamp(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp array from the data set <var>objectPath</var>. The time stamp is stored as
+     * a <code>long</code> value in the HDF5 file. It needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp array; each element is a number of milliseconds since January 1,
+     *         1970, 00:00:00 GMT.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not defined as type variant
+     *             {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public long[] readTimeStampArray(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time stamp array (of rank 1) from the data set <var>objectPath</var>. The
+     * time stamp is stored as a <code>long</code> value in the HDF5 file. It needs to be tagged as
+     * type variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply by
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public long[] readTimeStampArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a time stamp array (of rank 1) from the data set <var>objectPath</var>. The
+     * time stamp is stored as a <code>long</code> value in the HDF5 file. It needs to be tagged as
+     * type variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public long[] readTimeStampArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of time stamps to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#time()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<long[]>> getTimeStampArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp value from the data set <var>objectPath</var> and returns it as a
+     * {@link Date}. The time stamp is stored as a <code>long</code> value in the HDF5 file. It
+     * needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp as a {@link Date}.
+     * @throws HDF5JavaException If the <var>objectPath</var> does not denote a time stamp.
+     */
+    public Date readDate(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time stamp array (of rank 1) from the data set <var>objectPath</var> and returns it
+     * as an array of {@link Date}s. The time stamp array is stored as an array of
+     * <code>long</code> values in the HDF5 file. It needs to be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamps as an array of {@link Date}s.
+     * @throws HDF5JavaException If the <var>objectPath</var> does not denote a time stamp.
+     */
+    public Date[] readDateArray(final String objectPath) throws HDF5JavaException;
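+
+    // Sketch of reading a time stamp (path hypothetical); the data set has to be
+    // tagged with the TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH type variant:
+    //
+    //   long millis = reader.readTimeStamp("/experiment/start");
+    //   Date start = reader.readDate("/experiment/start"); // equals new Date(millis)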
+
+    /**
+     * Reads a time duration value and its unit from the data set <var>objectPath</var>. It needs to
+     * be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, HDF5TimeDuration)} or can be done by calling
+     * {@link IHDF5Writer#setTypeVariant(String, HDF5DataTypeVariant)}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time duration and its unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDuration readTimeDuration(final String objectPath) throws HDF5JavaException;
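+
+    // Sketch of reading a tagged time duration (path hypothetical; the
+    // getValue()/getUnit() accessors of HDF5TimeDuration are assumptions):
+    //
+    //   HDF5TimeDuration duration = reader.readTimeDuration("/experiment/runtime");
+    //   System.out.println(duration.getValue() + " " + duration.getUnit());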
+
+    /**
+     * Reads a time duration value and its unit from the data set <var>objectPath</var> and
+     * returns it as a {@link HDF5TimeDuration}. It needs to be tagged
+     * as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, long, HDF5TimeUnit)} or can be done by calling
+     * {@link IHDF5Writer#setTypeVariant(String, HDF5DataTypeVariant)}, most conveniently by code
+     * like
+     * 
+     * <pre>
+     * writer.setTypeVariant(&quot;/dataSetPath&quot;, HDF5TimeUnit.SECONDS.getTypeVariant());
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time duration and its unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#read(String)} instead.
+     */
+    @Deprecated
+    public HDF5TimeDuration readTimeDurationAndUnit(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time duration value from the data set <var>objectPath</var>, converts it to the given
+     * <var>timeUnit</var> and returns it as <code>long</code>. It needs to be tagged as one of the
+     * type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, long, HDF5TimeUnit)} or can be done by calling
+     * {@link IHDF5Writer#setTypeVariant(String, HDF5DataTypeVariant)}, most conveniently by code
+     * like
+     * 
+     * <pre>
+     * writer.setTypeVariant(&quot;/dataSetPath&quot;, HDF5TimeUnit.SECONDS.getTypeVariant());
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeUnit The time unit that the duration should be converted to.
+     * @return The time duration in the given unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#read(String)} and
+     *             {@link HDF5TimeUnit#convert(HDF5TimeDuration)} instead.
+     */
+    @Deprecated
+    public long readTimeDuration(final String objectPath, final HDF5TimeUnit timeUnit)
+            throws HDF5JavaException;
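+
+    // Sketch of the replacement suggested by the deprecation note above: read the
+    // duration and convert it explicitly (path hypothetical):
+    //
+    //   HDF5TimeDuration duration = reader.readTimeDuration("/experiment/runtime");
+    //   long seconds = HDF5TimeUnit.SECONDS.convert(duration);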
+
+    /**
+     * Reads a time duration array from the data set <var>objectPath</var>. It needs to be tagged as
+     * one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time durations and their unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationArray readTimeDurationArray(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time duration array from the data set <var>objectPath</var> and returns it as a
+     * <code>HDF5TimeDuration[]</code>. It needs to be tagged as one of the type variants that
+     * indicate a time duration, for example {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time durations in their respective time unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @see #readTimeDurationArray(String, HDF5TimeUnit)
+     * @deprecated Use {@link IHDF5TimeDurationReader#readArray(String)} instead.
+     */
+    @Deprecated
+    public HDF5TimeDuration[] readTimeDurationAndUnitArray(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time duration array from the data set <var>objectPath</var>, converts it to the given
+     * <var>timeUnit</var> and returns it as a <code>long[]</code> array. It needs to be tagged as
+     * one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeUnit The time unit that the duration should be converted to.
+     * @return The time duration in the given unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#readArray(String)} and
+     *             {@link HDF5TimeUnit#convert(HDF5TimeDurationArray)} instead.
+     */
+    @Deprecated
+    public long[] readTimeDurationArray(final String objectPath, final HDF5TimeUnit timeUnit)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * It needs to be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
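+     * <p>
+     * Block <code>i</code> covers the elements <code>[i * blockSize, (i + 1) * blockSize)</code>.
+     * A sketch (path hypothetical):
+     * 
+     * <pre>
+     * // read elements [100, 200) of the data set, if it is long enough
+     * HDF5TimeDurationArray block = reader.readTimeDurationArrayBlock("/durations", 100, 1);
+     * </pre>
+     * 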
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the array returned if the data
+     *            set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be
+     *         <code>min(size - blockSize*blockNumber,
+     *         blockSize)</code>.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public HDF5TimeDurationArray readTimeDurationArrayBlock(final String objectPath,
+            final int blockSize, final long blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * The time durations are stored as a <code>long[]</code> value in the HDF5 file. It needs to be
+     * tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @param timeUnit The time unit that the duration should be converted to.
+     * @return The data read from the data set. The length will be
+     *         <code>min(size - blockSize*blockNumber,
+     *         blockSize)</code>.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#readArrayBlock(String, int, long)} and
+     *             {@link HDF5TimeUnit#convert(long[], HDF5TimeUnit)} instead.
+     */
+    @Deprecated
+    public long[] readTimeDurationArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber, final HDF5TimeUnit timeUnit) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * It needs to be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
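+     * <p>
+     * Unlike the block-number variant, the <var>offset</var> addresses elements directly. A
+     * sketch (path hypothetical):
+     * 
+     * <pre>
+     * // read elements [42, 142) of the data set
+     * HDF5TimeDurationArray block =
+     *         reader.readTimeDurationArrayBlockWithOffset("/durations", 100, 42);
+     * </pre>
+     * 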
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the array returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public HDF5TimeDurationArray readTimeDurationArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * The time durations are stored as a <code>long[]</code> value in the HDF5 file. It needs to be
+     * tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @param timeUnit The time unit that the duration should be converted to.
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#readArrayBlockWithOffset(String, int, long)}
+     *             and {@link HDF5TimeUnit#convert(HDF5TimeDurationArray)} instead.
+     */
+    @Deprecated
+    public long[] readTimeDurationArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset, final HDF5TimeUnit timeUnit)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * The time durations are stored as a <code>HDF5TimeDuration[]</code> value in the HDF5 file. It
+     * needs to be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the
+     *            <code>HDF5TimeDuration[]</code> returned if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be
+     *         <code>min(size - blockSize*blockNumber,
+     *         blockSize)</code>.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#readArrayBlock(String, int, long)} and
+     *             {@link HDF5TimeUnit#convert(HDF5TimeDuration[])} instead.
+     */
+    @Deprecated
+    public HDF5TimeDuration[] readTimeDurationAndUnitArrayBlock(final String objectPath,
+            final int blockSize, final long blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * The time durations are stored as a <code>HDF5TimeDuration[]</code> value in the HDF5 file. It
+     * needs to be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * See {@link #readTimeDuration(String, HDF5TimeUnit)} for how the tagging is done.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the
+     *            <code>HDF5TimeDuration[]</code> returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     * @deprecated Use {@link IHDF5TimeDurationReader#readArrayBlockWithOffset(String, int, long)}
+     *             and {@link HDF5TimeUnit#convert(HDF5TimeDuration[])} instead.
+     */
+    @Deprecated
+    public HDF5TimeDuration[] readTimeDurationAndUnitArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of time durations to iterate
+     * over.
+     * 
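+     * <p>
+     * A typical iteration sketch (path hypothetical; <code>process()</code> is a hypothetical
+     * consumer of each block):
+     * 
+     * <pre>
+     * for (HDF5DataBlock&lt;HDF5TimeDurationArray&gt; block : reader
+     *         .getTimeDurationArrayNaturalBlocks("/durations"))
+     * {
+     *     // block.getData() is the array, block.getOffset() its position in the data set
+     *     process(block.getData(), block.getOffset());
+     * }
+     * </pre>
+     * 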
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of a time duration data type or not of rank
+     *             1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#duration()} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<HDF5TimeDurationArray>> getTimeDurationArrayNaturalBlocks(
+            final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of time durations to iterate
+     * over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeUnit The time unit that the duration should be converted to.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of a time duration data type or not of rank
+     *             1.
+     * @deprecated Use {@link IHDF5TimeDurationReader#getArrayNaturalBlocks(String)} and
+     *             {@link HDF5TimeUnit#convert(long[], HDF5TimeUnit)} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<long[]>> getTimeDurationArrayNaturalBlocks(
+            final String objectPath, final HDF5TimeUnit timeUnit) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of time durations to iterate
+     * over.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of a time duration data type or not of rank
+     *             1.
+     * @deprecated Use {@link IHDF5TimeDurationReader#getArrayNaturalBlocks(String)} instead.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<HDF5TimeDuration[]>> getTimeDurationAndUnitArrayNaturalBlocks(
+            final String objectPath) throws HDF5JavaException;
+
+    // *********************
+    // Reference
+    // *********************
+
+    // //////////////////////////////
+    // Specific to object references
+    // //////////////////////////////
+
+    /**
+     * Resolves the path of a reference which has been read without name resolution.
+     * 
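+     * <p>
+     * A sketch (paths hypothetical): read a reference without resolving it, then resolve it on
+     * demand:
+     * 
+     * <pre>
+     * String ref = reader.readObjectReference("/refDS", false);
+     * String path = reader.resolvePath(ref);
+     * </pre>
+     * 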
+     * @param reference Reference encoded as string.
+     * @return The path in the HDF5 file.
+     * @see #readObjectReferenceArray(String, boolean)
+     * @throws HDF5JavaException if <var>reference</var> is not a string-encoded reference.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String resolvePath(final String reference) throws HDF5JavaException;
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads an object reference attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>, resolving the name of the object. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String getObjectReferenceAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Reads an object reference attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
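+     * <p>
+     * A sketch that keeps the raw reference (object path and attribute name are hypothetical):
+     * 
+     * <pre>
+     * String ref = reader.getObjectReferenceAttribute("/ds", "refAttr", false);
+     * </pre>
+     * 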
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param resolveName If <code>true</code>, resolves the name of the object referenced,
+     *            otherwise returns the reference itself.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String getObjectReferenceAttribute(final String objectPath, final String attributeName,
+            final boolean resolveName);
+
+    /**
+     * Reads a 1D object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] getObjectReferenceArrayAttribute(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Reads a 1D object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] getObjectReferenceArrayAttribute(final String objectPath,
+            final String attributeName, final boolean resolveName);
+
+    /**
+     * Reads an object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> getObjectReferenceMDArrayAttribute(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Reads an object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> getObjectReferenceMDArrayAttribute(final String objectPath,
+            final String attributeName, boolean resolveName);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads an object reference from the object <var>objectPath</var>, resolving the name of the
+     * object. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
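+     * <p>
+     * A minimal sketch (path hypothetical):
+     * 
+     * <pre>
+     * String referencedPath = reader.readObjectReference("/refDS");
+     * </pre>
+     * 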
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String readObjectReference(final String objectPath);
+
+    /**
+     * Reads an object reference from the object <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param resolveName If <code>true</code>, resolves the name of the object referenced,
+     *            otherwise returns the reference itself.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String readObjectReference(final String objectPath, final boolean resolveName);
+
+    /**
+     * Reads an array of object references from the object <var>objectPath</var>, resolving the
+     * names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The array of the paths of objects that the references refer to. Each string may be
+     *         empty, if the corresponding object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] readObjectReferenceArray(final String objectPath);
+
+    /**
+     * Reads an array of object references from the object <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The array of the paths of objects that the references refer to. Each string may be
+     *         empty, if the corresponding object reference refers to an unnamed object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] readObjectReferenceArray(final String objectPath, boolean resolveName);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
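+     * <p>
+     * A sketch (path hypothetical); block <code>i</code> starts at element
+     * <code>i * blockSize</code>:
+     * 
+     * <pre>
+     * // read the referenced paths of elements [100, 150)
+     * String[] paths = reader.readObjectReferenceArrayBlock("/refs", 50, 2);
+     * </pre>
+     * 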
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code>
+     *            returned if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The referenced data set paths read from the data set. The length will be min(size -
+     *         blockSize*blockNumber, blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] readObjectReferenceArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code>
+     *            returned if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths read from the data set. The length will be min(size -
+     *         blockSize*blockNumber, blockSize).
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] readObjectReferenceArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber, final boolean resolveName);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The referenced data set paths block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] readObjectReferenceArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public String[] readObjectReferenceArrayBlockWithOffset(final String objectPath,
+            final int blockSize, final long offset, final boolean resolveName);
+
+    /**
+     * Reads an array (of rank N) of object references from the object <var>objectPath</var>,
+     * resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The multi-dimensional array of the paths of objects that the references refer to.
+     *         Each string may be empty, if the corresponding object reference refers to an unnamed
+     *         object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> readObjectReferenceMDArray(final String objectPath);
+
+    /**
+     * Reads an array (of rank N) of object references from the object <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The multi-dimensional array of the paths of objects that the references refer to.
+     *         Each string may be empty, if the corresponding object reference refers to an unnamed
+     *         object.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> readObjectReferenceMDArray(final String objectPath, boolean resolveName);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>,
+     * resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
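+     * <p>
+     * A sketch (path hypothetical) reading a 10x10 block whose offset in each dimension is the
+     * block number times the block dimension:
+     * 
+     * <pre>
+     * MDArray&lt;String&gt; block = reader.readObjectReferenceMDArrayBlock("/refs2D",
+     *         new int[] { 10, 10 }, new long[] { 0, 1 });
+     * </pre>
+     * 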
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The referenced data set paths block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> readObjectReferenceMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> readObjectReferenceMDArrayBlock(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber, final boolean resolveName);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>,
+     * resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The referenced data set paths block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> readObjectReferenceMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths block read from the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public MDArray<String> readObjectReferenceMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset, final boolean resolveName);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<String[]>> getObjectReferenceArrayNaturalBlocks(
+            final String dataSetPath);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public Iterable<HDF5DataBlock<String[]>> getObjectReferenceArrayNaturalBlocks(
+            final String dataSetPath, final boolean resolveName);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getObjectReferenceMDArrayNaturalBlocks(
+            final String dataSetPath);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     * @deprecated Use the corresponding method in {@link IHDF5Reader#reference()}.
+     */
+    @Deprecated
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getObjectReferenceMDArrayNaturalBlocks(
+            final String dataSetPath, final boolean resolveName);
+
+    // *********************
+    // Compounds
+    // *********************
+
+    /**
+     * Returns the full reader for compounds.
+     * 
+     * @deprecated Use {@link IHDF5Reader#compound()} instead.
+     */
+    @Deprecated
+    public IHDF5CompoundReader compounds();
+
+    // *********************
+    // Enums
+    // *********************
+
+    /**
+     * Returns the full reader for enums.
+     * 
+     * @deprecated Use {@link IHDF5Reader#enumeration()} instead.
+     */
+    @Deprecated
+    public IHDF5EnumReader enums();
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5LegacyWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5LegacyWriter.java
new file mode 100644
index 0000000..ae900b1
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5LegacyWriter.java
@@ -0,0 +1,5139 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.Flushable;
+import java.util.BitSet;
+import java.util.Date;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SymbolTableException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * The legacy interface for writing HDF5 files. Do not use in any new code as it will be removed in
+ * a future version of JHDF5.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5LegacyWriter extends IHDF5EnumBasicWriter, IHDF5CompoundBasicWriter
+{
+    // *********************
+    // File level
+    // *********************
+
+    // /////////////////////
+    // Configuration
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if the {@link IHDF5WriterConfigurator} was <em>not</em> configured
+     * with {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}, that is if extendable data
+     * types are used for new data sets.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#file()} instead.
+     */
+    @Deprecated
+    public boolean isUseExtendableDataTypes();
+
+    /**
+     * Returns the {@link FileFormat} compatibility setting for this writer.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#file()} instead.
+     */
+    @Deprecated
+    public FileFormat getFileFormat();
+
+    // /////////////////////
+    // Flushing and Syncing
+    // /////////////////////
+
+    /**
+     * Flushes the cache to disk (without discarding it). Note that this may or may not trigger a
+     * <code>fsync(2)</code>, depending on the {@link IHDF5WriterConfigurator.SyncMode} used.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#file()} instead.
+     */
+    @Deprecated
+    public void flush();
+
+    /**
+     * Flushes the cache to disk (without discarding it) and synchronizes the file with the
+     * underlying storage using a method like <code>fsync(2)</code>, regardless of what
+     * {@link IHDF5WriterConfigurator.SyncMode} has been set for this file.
+     * <p>
+     * This method blocks until <code>fsync(2)</code> has returned.
+     * 
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#file()} instead.
+     */
+    @Deprecated
+    public void flushSyncBlocking();
+
+    /**
+     * Adds a {@link Flushable} to the set of flushables. This set is flushed when {@link #flush()}
+     * or {@link #flushSyncBlocking()} are called and before the writer is closed.
+     * <p>
+     * This function is supposed to be used for in-memory caching structures that need to make it
+     * into the HDF5 file.
+     * <p>
+     * If the <var>flushable</var> implements
+     * {@link ch.systemsx.cisd.base.exceptions.IErrorStrategy}, in case of an exception in
+     * {@link Flushable#flush()}, the method
+     * {@link ch.systemsx.cisd.base.exceptions.IErrorStrategy#dealWithError(Throwable)} will be
+     * called to decide how to deal with the exception.
+     * 
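+     * <p>
+     * A sketch registering an in-memory cache (<code>myCache</code> is a hypothetical object
+     * implementing {@link Flushable}):
+     * 
+     * <pre>
+     * writer.addFlushable(myCache);
+     * </pre>
+     * 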
+     * @param flushable The {@link Flushable} to add. Needs to fulfill the {@link Object#hashCode()}
+     *            contract.
+     * @return <code>true</code> if the set of flushables did not already contain the specified
+     *         element.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#file()} instead.
+     */
+    @Deprecated
+    public boolean addFlushable(Flushable flushable);
+
+    /**
+     * Removes a {@link Flushable} from the set of flushables.
+     * 
+     * @param flushable The {@link Flushable} to remove. Needs to fulfill the
+     *            {@link Object#hashCode()} contract.
+     * @return <code>true</code> if the set of flushables contained the specified element.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#file()} instead.
+     */
+    @Deprecated
+    public boolean removeFlushable(Flushable flushable);
+
+    // ***********************
+    // Objects, Links, Groups
+    // ***********************
+
+    /**
+     * Creates a hard link.
+     * 
+     * @param currentPath The name of the data set (including path information) to create a link to.
+     * @param newPath The name (including path information) of the link to create.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createHardLink(String currentPath, String newPath);
+
+    /**
+     * Creates a soft link.
+     * 
+     * @param targetPath The name of the data set (including path information) to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createSoftLink(String targetPath, String linkPath);
+
+    /**
+     * Creates or updates a soft link.
+     * <p>
+     * <em>Note: This method will never overwrite a data set, but only a symbolic link.</em>
+     * 
+     * @param targetPath The name of the data set (including path information) to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createOrUpdateSoftLink(String targetPath, String linkPath);
+
+    /**
+     * Creates an external link, that is a link to a data set in another HDF5 file, the
+     * <em>target</em>.
+     * <p>
+     * <em>Note: This method is only allowed when the {@link IHDF5WriterConfigurator} was not 
+     * configured to enforce strict HDF5 1.6 compatibility.</em>
+     * 
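+     * <p>
+     * A sketch (file and path names hypothetical):
+     * 
+     * <pre>
+     * writer.createExternalLink("other.h5", "/dataSet", "/linkToOther");
+     * </pre>
+     * 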
+     * @param targetFileName The name of the file where the data set resides that should be linked.
+     * @param targetPath The name of the data set (including path information) in the
+     *            <var>targetFileName</var> to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     * @throws IllegalStateException If the {@link IHDF5WriterConfigurator} was configured to
+     *             enforce strict HDF5 1.6 compatibility.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException;
+
+    /**
+     * Creates or updates an external link, that is a link to a data set in another HDF5 file, the
+     * <em>target</em>.
+     * <p>
+     * <em>Note: This method will never overwrite a data set, but only a symbolic link.</em>
+     * <p>
+     * <em>Note: This method is only allowed when the {@link IHDF5WriterConfigurator} was not 
+     * configured to enforce strict HDF5 1.6 compatibility.</em>
+     * 
+     * @param targetFileName The name of the file where the data set resides that should be linked.
+     * @param targetPath The name of the data set (including path information) in the
+     *            <var>targetFileName</var> to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     * @throws IllegalStateException If the {@link IHDF5WriterConfigurator} was configured to
+     *             enforce strict HDF5 1.6 compatibility.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createOrUpdateExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException;
+
+    /**
+     * Moves or renames a link in the file atomically.
+     * 
+     * @throws HDF5SymbolTableException If <var>oldLinkPath</var> does not exist or if
+     *             <var>newLinkPath</var> already exists.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void move(String oldLinkPath, String newLinkPath) throws HDF5SymbolTableException;
+
+    // /////////////////////
+    // Group
+    // /////////////////////
+
+    /**
+     * Creates a group with path <var>groupPath</var> in the HDF5 file.
+     * <p>
+     * All intermediate groups will be created as well, if they do not already exist.
+     * 
+     * @param groupPath The path of the group to create.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createGroup(final String groupPath);
+
+    /**
+     * Creates a group with path <var>groupPath</var> in the HDF5 file, giving the library a hint
+     * about the size (<var>sizeHint</var>). If you have this information in advance, it is more
+     * efficient to tell the library than to let it figure out the size itself, but the hint must
+     * not be misunderstood as a limit.
+     * <p>
+     * All intermediate groups will be created as well, if they do not already exist.
+     * <p>
+     * <i>Note: This method creates an "old-style group", that is the type of group of HDF5 1.6 and
+     * earlier.</i>
+     * 
+     * @param groupPath The path of the group to create.
+     * @param sizeHint The estimated size of all group entries (in bytes).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createGroup(final String groupPath, final int sizeHint);
+
+    /**
+     * Creates a group with path <var>groupPath</var> in the HDF5 file, giving the library hints
+     * about when to switch between the compact and the dense group style. Setting appropriate
+     * values may improve performance.
+     * <p>
+     * All intermediate groups will be created as well, if they do not already exist.
+     * <p>
+     * <i>Note: This method creates a "new-style group", that is the type of group of HDF5 1.8 and
+     * above. Thus it will fail, if the writer is configured to enforce HDF5 1.6 compatibility.</i>
+     * 
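+     * <p>
+     * A sketch (path and threshold values illustrative only):
+     * 
+     * <pre>
+     * // switch to dense above 100 entries, back to compact below 80
+     * writer.createGroup("/largeGroup", 100, 80);
+     * </pre>
+     * 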
+     * @param groupPath The path of the group to create.
+     * @param maxCompact When the group grows to more than this number of entries, the library will
+     *            convert the group style from compact to dense.
+     * @param minDense When the group shrinks below this number of entries, the library will convert
+     *            the group style from dense to compact.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void createGroup(final String groupPath, final int maxCompact, final int minDense);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Sets the data set size of a one-dimensional data set to <var>newSize</var>. Note that this
+     * method can only be applied to extendable data sets.
+     * 
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not extendable.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void setDataSetSize(final String objectPath, final long newSize);
+
+    /**
+     * Sets the data set size of a multi-dimensional data set to <var>newDimensions</var>. Note that
+     * this method can only be applied to extendable data sets.
+     * 
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not extendable.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void setDataSetDimensions(final String objectPath, final long[] newDimensions);
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Sets a <var>typeVariant</var> of object <var>objectPath</var>.
+     * 
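+     * <p>
+     * For example, a data set can be tagged as a time duration in seconds (path hypothetical):
+     * 
+     * <pre>
+     * writer.setTypeVariant("/dataSetPath", HDF5TimeUnit.SECONDS.getTypeVariant());
+     * </pre>
+     * 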
+     * @param objectPath The name of the object to add the type variant to.
+     * @param typeVariant The type variant to add.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void setTypeVariant(final String objectPath, final HDF5DataTypeVariant typeVariant);
+
+    /**
+     * Sets a <var>typeVariant</var> of attribute <var>attributeName</var> of object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name of the object.
+     * @param attributeName The name of attribute to add the type variant to.
+     * @param typeVariant The type variant to add.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void setTypeVariant(final String objectPath, final String attributeName,
+            final HDF5DataTypeVariant typeVariant);
+
+    /**
+     * Deletes the <var>typeVariant</var> from <var>objectPath</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to delete the type variant from.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void deleteTypeVariant(final String objectPath);
+
+    /**
+     * Deletes the <var>typeVariant</var> from <var>attributeName</var> of <var>objectPath</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object.
+     * @param attributeName The name of the attribute to delete the type variant from.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void deleteTypeVariant(final String objectPath, final String attributeName);
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Deletes an attribute.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to delete the attribute from.
+     * @param name The name of the attribute to delete.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#object()} instead.
+     */
+    @Deprecated
+    public void deleteAttribute(final String objectPath, final String name);
+
+    // *********************
+    // Opaque
+    // *********************
+
+    /**
+     * Writes out an opaque data type described by <var>tag</var> and defined by a <code>byte</code>
+     * array (of rank 1).
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArray(String)} instead.
+     * 
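+     * <p>
+     * A round-trip sketch (tag and path hypothetical; <code>reader.opaque()</code> assumes the
+     * opaque sub-reader of {@link IHDF5Reader}):
+     * 
+     * <pre>
+     * writer.writeOpaqueByteArray("/blob", "myTag", new byte[] { 1, 2, 3 });
+     * byte[] back = reader.opaque().readArray("/blob");
+     * </pre>
+     * 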
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public void writeOpaqueByteArray(final String objectPath, final String tag, final byte[] data);
+
+    /**
+     * Writes out an opaque data type described by <var>tag</var> and defined by a <code>byte</code>
+     * array (of rank 1).
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArray(String)} instead.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public void writeOpaqueByteArray(final String objectPath, final String tag, final byte[] data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param size The size of the byte array to create.
+     * @param blockSize The size of one block (for block-wise I/O).
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeOpaqueByteArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeOpaqueByteArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public HDF5OpaqueType createOpaqueByteArray(final String objectPath, final String tag,
+            final long size, final int blockSize);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param size The size of the byte array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeOpaqueByteArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeOpaqueByteArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public HDF5OpaqueType createOpaqueByteArray(final String objectPath, final String tag,
+            final int size);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param size The size of the byte array to create.
+     * @param blockSize The size of one block (for block-wise IO).
+     * @param features The storage features of the data set.
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeOpaqueByteArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeOpaqueByteArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public HDF5OpaqueType createOpaqueByteArray(final String objectPath, final String tag,
+            final long size, final int blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param size The size of the byte array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeOpaqueByteArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeOpaqueByteArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public HDF5OpaqueType createOpaqueByteArray(final String objectPath, final String tag,
+            final int size, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of an opaque data type represented by a <code>byte</code> array (of rank
+     * 1). The data set needs to have been created by
+     * {@link #createOpaqueByteArray(String, String, long, int, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createOpaqueByteArray(String, String, long, int, HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArrayBlock(String, int, long)} instead.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dataType The opaque type of the data set, as returned by the create call.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public void writeOpaqueByteArrayBlock(final String objectPath, final HDF5OpaqueType dataType,
+            final byte[] data, final long blockNumber);
+
+    /**
+     * Writes out a block of an opaque data type represented by a <code>byte</code> array (of rank
+     * 1). The data set needs to have been created by
+     * {@link #createOpaqueByteArray(String, String, long, int, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #writeOpaqueByteArrayBlock(String, HDF5OpaqueType, byte[], long)} if the total size of
+     * the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createOpaqueByteArray(String, String, long, int, HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArrayBlockWithOffset(String, int, long)} instead.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dataType The opaque type of the data set, as returned by the create call.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#opaque()} instead.
+     */
+    @Deprecated
+    public void writeOpaqueByteArrayBlockWithOffset(final String objectPath,
+            final HDF5OpaqueType dataType, final byte[] data, final int dataSize, final long offset);
+
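+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): block-wise
+    // writing of an opaque data set with the methods documented above. The block count,
+    // the helper nextBlock() and the trailing array rest are hypothetical.
+    //
+    //   final int blockSize = 4096;
+    //   final HDF5OpaqueType type =
+    //           writer.createOpaqueByteArray("/blobs/big", "MY_TAG", 0L, blockSize);
+    //   for (long i = 0; i < fullBlocks; ++i)
+    //   {
+    //       writer.writeOpaqueByteArrayBlock("/blobs/big", type, nextBlock(), i);
+    //   }
+    //   // Trailing block shorter than blockSize:
+    //   writer.writeOpaqueByteArrayBlockWithOffset("/blobs/big", type, rest, rest.length,
+    //           fullBlocks * (long) blockSize);
+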
+    // *********************
+    // Boolean
+    // *********************
+
+    /**
+     * Sets a <code>boolean</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void setBooleanAttribute(final String objectPath, final String name, final boolean value);
+
+    /**
+     * Writes out a <code>boolean</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     */
+    public void writeBoolean(final String objectPath, final boolean value);
+
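+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): a boolean data
+    // set plus a boolean attribute; the attribute can only be set once the data set exists.
+    //
+    //   writer.writeBoolean("/flags/enabled", true);
+    //   writer.setBooleanAttribute("/flags/enabled", "validated", false);
+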
+    /**
+     * Writes out a bit field (which can be considered the equivalent of a boolean array of rank
+     * 1), provided as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeBitField(final String objectPath, final BitSet data);
+
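+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): storing a
+    // java.util.BitSet as an HDF5 bit field.
+    //
+    //   final BitSet flags = new BitSet();
+    //   flags.set(3);
+    //   flags.set(64); // lands in the second 64 bit word of the storage form
+    //   writer.writeBitField("/flags/bits", flags);
+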
+    /**
+     * Writes out a bit field (which can be considered the equivalent of a boolean array of rank
+     * 1), provided as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link IHDF5LongReader#readArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void writeBitField(final String objectPath, final BitSet data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. This will be the total
+     *            size for non-extendable data sets and the size of one chunk for extendable
+     *            (chunked) data sets. For extendable data sets the initial size of the array will
+     *            be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void createBitField(final String objectPath, final int size);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. When using extendable
+     *            data sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no
+     *            data set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void createBitField(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a bit field array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. This will be the total
+     *            size for non-extendable data sets and the size of one chunk for extendable
+     *            (chunked) data sets. For extendable data sets the initial size of the array will
+     *            be 0, see {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void createBitField(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a bit field (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size (in 64 bit words) of the bit field to create. When using extendable
+     *            data sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no
+     *            data set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void createBitField(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a bit field (of rank 1). The data set needs to have been created by
+     * {@link #createBitField(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createBitField(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code>
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void writeBitFieldBlock(final String objectPath, final BitSet data, final int dataSize,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a bit field (of rank 1). The data set needs to have been
+     * created by {@link #createBitField(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeBitFieldBlock(String, BitSet, int, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createBitField(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code>
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#bool()}.
+     */
+    @Deprecated
+    public void writeBitFieldBlockWithOffset(final String objectPath, final BitSet data,
+            final int dataSize, final long offset);
+
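+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): block-wise bit
+    // field I/O with the methods above. Sizes are in 64 bit words; the values and the
+    // BitSet variables are hypothetical.
+    //
+    //   writer.createBitField("/flags/stream", 0L, 16); // block size: 16 words
+    //   writer.writeBitFieldBlock("/flags/stream", firstBits, 16, 0L);
+    //   writer.writeBitFieldBlockWithOffset("/flags/stream", lastBits, 4, 16L);
+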
+    // *********************
+    // Byte
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>byte</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void setByteAttribute(final String objectPath, final String name, final byte value);
+
+    /**
+     * Set a <code>byte[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void setByteArrayAttribute(final String objectPath, final String name, final byte[] value);
+
+    /**
+     * Set a multi-dimensional <code>byte</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void setByteMDArrayAttribute(final String objectPath, final String name,
+            final MDByteArray value);
+
+    /**
+     * Set a <code>byte[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void setByteMatrixAttribute(final String objectPath, final String name,
+            final byte[][] value);
+
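+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): byte-valued
+    // attributes on an existing data set; paths and attribute names are hypothetical.
+    //
+    //   writer.writeByteArray("/data/v", new byte[] { 1, 2, 3 });
+    //   writer.setByteAttribute("/data/v", "version", (byte) 3);
+    //   writer.setByteArrayAttribute("/data/v", "lut", new byte[] { 0, 1, 2 });
+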
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>byte</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByte(final String objectPath, final byte value);
+
+    /**
+     * Writes out a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeByteArray(final String objectPath, final byte[] data);
+
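+    // Usage sketch (illustrative only, given an open IHDF5Writer writer and a byte[]
+    // payload): a plain write and a deflated write; INT_DEFLATE is assumed to be one of
+    // the constants of the HDF5IntStorageFeatures class referenced above.
+    //
+    //   writer.writeByteArray("/data/raw", payload);
+    //   writer.writeByteArray("/data/packed", payload, HDF5IntStorageFeatures.INT_DEFLATE);
+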
+    /**
+     * Writes out a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteArray(final String objectPath, final byte[] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteArray(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>byte</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createByteArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createByteArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteArrayBlock(final String objectPath, final byte[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>byte</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createByteArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeByteArrayBlock(String, byte[], long)} if the total
+     * size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createByteArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteArrayBlockWithOffset(final String objectPath, final byte[] data,
+            final int dataSize, final long offset);
+
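+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): block-wise
+    // writing of a byte array created with a hypothetical block size of 4096; the final,
+    // shorter block tail goes through the offset variant.
+    //
+    //   writer.createByteArray("/data/stream", 0L, 4096);
+    //   writer.writeByteArrayBlock("/data/stream", block0, 0L);
+    //   writer.writeByteArrayBlock("/data/stream", block1, 1L);
+    //   writer.writeByteArrayBlockWithOffset("/data/stream", tail, tail.length, 2L * 4096);
+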
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMatrix(final String objectPath, final byte[][] data);
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMatrix(final String objectPath, final byte[][] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2). The initial size of the matrix is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMatrix(final String objectPath, final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the byte matrix to create.
+     * @param sizeY The size of the y dimension of the byte matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the byte matrix to create.
+     * @param sizeY The size of the y dimension of the byte matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createByteMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * If the total size of the data set is not a multiple of the block size, use
+     * {@link #writeByteMatrixBlockWithOffset(String, byte[][], long, long)} instead of this
+     * method.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createByteMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMatrixBlock(final String objectPath, final byte[][] data,
+            final long blockNumberX, final long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createByteMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeByteMatrixBlock(String, byte[][], long, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createByteMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMatrixBlockWithOffset(final String objectPath, final byte[][] data,
+            final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createByteMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeByteMatrixBlock(String, byte[][], long, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSizeX/Y</var> in this method should
+     * be chosen to match the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createByteMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMatrixBlockWithOffset(final String objectPath, final byte[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY);
+
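+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): a 2D byte data
+    // set written tile by tile. Full 64x64 tiles use block numbers; the ragged edge uses
+    // explicit sizes and offsets. All variables are hypothetical.
+    //
+    //   writer.createByteMatrix("/img/pixels", sizeX, sizeY, 64, 64);
+    //   writer.writeByteMatrixBlock("/img/pixels", fullTile, 0L, 0L);
+    //   writer.writeByteMatrixBlockWithOffset("/img/pixels", edgeTile, edgeSizeX, edgeSizeY,
+    //           offsetX, offsetY);
+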
+    /**
+     * Writes out a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMDArray(final String objectPath, final MDByteArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMDArray(final String objectPath, final MDByteArray data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the byte array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (extent
+     *            along each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array. Will be the total dimensions for
+     *            non-extendable data sets and the dimensions of one chunk for extendable (chunked)
+     *            data sets. For extendable data sets the initial size of the array along each axis
+     *            will be 0, see {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void createByteMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMDArrayBlock(final String objectPath, final MDByteArray data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMDArrayBlockWithOffset(final String objectPath, final MDByteArray data,
+            final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int8()}.
+     */
+    @Deprecated
+    public void writeByteMDArrayBlockWithOffset(final String objectPath, final MDByteArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
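+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): a 3D byte array
+    // via MDByteArray from ch.systemsx.cisd.base.mdarray; the constructor taking int[]
+    // dimensions and the set(value, indices...) signature are assumptions.
+    //
+    //   final MDByteArray cube = new MDByteArray(new int[] { 4, 4, 4 });
+    //   cube.set((byte) 1, 0, 1, 2); // value first, then the index in each dimension
+    //   writer.writeByteMDArray("/voxels", cube);
+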
+    // *********************
+    // Short
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>short</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void setShortAttribute(final String objectPath, final String name, final short value);
+
+    /**
+     * Set a <code>short[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void setShortArrayAttribute(final String objectPath, final String name,
+            final short[] value);
+
+    /**
+     * Set a multi-dimensional <code>short</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void setShortMDArrayAttribute(final String objectPath, final String name,
+            final MDShortArray value);
+
+    /**
+     * Set a <code>short[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void setShortMatrixAttribute(final String objectPath, final String name,
+            final short[][] value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>short</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShort(final String objectPath, final short value);
+
+    /**
+     * Writes out a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortArray(final String objectPath, final short[] data);
+
+    /**
+     * Writes out a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortArray(final String objectPath, final short[] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortArray(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>short</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createShortArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createShortArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortArrayBlock(final String objectPath, final short[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>short</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createShortArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeShortArrayBlock(String, short[], long)} if the total
+     * size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createShortArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortArrayBlockWithOffset(final String objectPath, final short[] data,
+            final int dataSize, final long offset);
+
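+    // Usage sketch (illustrative only, given an open IHDF5Writer writer): the same
+    // block-wise pattern as for byte arrays, applied to 16 bit integers; the short[]
+    // variables are hypothetical.
+    //
+    //   writer.createShortArray("/wave/samples", 0L, 4096);
+    //   writer.writeShortArrayBlock("/wave/samples", samples, 0L);
+    //   writer.writeShortArrayBlockWithOffset("/wave/samples", tail, tail.length, 4096L);
+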
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMatrix(final String objectPath, final short[][] data);
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMatrix(final String objectPath, final short[][] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2). The initial size of the matrix is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMatrix(final String objectPath, final int blockSizeX,
+            final int blockSizeY);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the short matrix to create.
+     * @param sizeY The size of the y dimension of the short matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the short matrix to create.
+     * @param sizeY The size of the y dimension of the short matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createShortMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * If the total size of the data set is not a multiple of the block size, use
+     * {@link #writeShortMatrixBlockWithOffset(String, short[][], long, long)} instead of this
+     * method.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createShortMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMatrixBlock(final String objectPath, final short[][] data,
+            final long blockNumberX, final long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createShortMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeShortMatrixBlock(String, short[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createShortMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMatrixBlockWithOffset(final String objectPath, final short[][] data,
+            final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createShortMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeShortMatrixBlock(String, short[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSizeX/Y</var> in this method should
+     * be chosen to match the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createShortMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMatrixBlockWithOffset(final String objectPath, final short[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMDArray(final String objectPath, final MDShortArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMDArray(final String objectPath, final MDShortArray data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the short array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (extent
+     *            along each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array. Will be the total dimensions for
+     *            non-extendable data sets and the dimensions of one chunk for extendable (chunked)
+     *            data sets. For extendable data sets the initial size of the array along each axis
+     *            will be 0, see {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void createShortMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMDArrayBlock(final String objectPath, final MDShortArray data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMDArrayBlockWithOffset(final String objectPath, final MDShortArray data,
+            final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int16()}.
+     */
+    @Deprecated
+    public void writeShortMDArrayBlockWithOffset(final String objectPath, final MDShortArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
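+    // A sketch of the memoryOffset variant above (illustrative only; assumes an IHDF5Writer named
+    // 'writer', and the path and sizes are made up). It writes an inner 4x4 region of a larger
+    // in-memory array, which is useful when a single MDShortArray buffers more than one block:
+    //
+    //   MDShortArray buffer = new MDShortArray(new int[] { 8, 8 });
+    //   writer.writeShortMDArrayBlockWithOffset("/mdds", buffer,
+    //           new int[] { 4, 4 },     // dimensions of the block actually written
+    //           new long[] { 0, 0 },    // offset of the block in the file data set
+    //           new int[] { 2, 2 });    // offset of the block inside 'buffer'
+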
+    // *********************
+    // Int
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set an <code>int</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void setIntAttribute(final String objectPath, final String name, final int value);
+
+    /**
+     * Set an <code>int[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void setIntArrayAttribute(final String objectPath, final String name, final int[] value);
+
+    /**
+     * Set a multi-dimensional <code>int</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void setIntMDArrayAttribute(final String objectPath, final String name,
+            final MDIntArray value);
+
+    /**
+     * Set an <code>int[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void setIntMatrixAttribute(final String objectPath, final String name,
+            final int[][] value);
+
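+    // A minimal sketch for the attribute setters above (illustrative only; assumes an IHDF5Writer
+    // named 'writer', and the path and attribute names are made up). The object has to be written
+    // before attributes can be attached to it:
+    //
+    //   writer.writeIntArray("/ds", new int[] { 1, 2, 3 });
+    //   writer.setIntAttribute("/ds", "version", 7);
+    //   writer.setIntArrayAttribute("/ds", "histogram", new int[] { 4, 5, 6 });
+    //   writer.setIntMatrixAttribute("/ds", "transform", new int[][] { { 1, 0 }, { 0, 1 } });
+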
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out an <code>int</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeInt(final String objectPath, final int value);
+
+    /**
+     * Writes out an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeIntArray(final String objectPath, final int[] data);
+
+    /**
+     * Writes out an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntArray(final String objectPath, final int[] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntArray(final String objectPath, final int size);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features);
+
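+    // A sketch contrasting the create variants above (illustrative only; assumes an IHDF5Writer
+    // named 'writer'; paths, sizes, and the INT_DEFLATE / INT_CHUNKED constants of
+    // HDF5IntStorageFeatures are assumptions):
+    //
+    //   // total size 100, written block-wise in chunks of 10, deflate-compressed:
+    //   writer.createIntArray("/fixedSize", 100L, 10, HDF5IntStorageFeatures.INT_DEFLATE);
+    //   // chunked data set that starts at size 0 and grows as blocks are written:
+    //   writer.createIntArray("/growing", 0L, 10, HDF5IntStorageFeatures.INT_CHUNKED);
+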
+    /**
+     * Writes out a block of an <code>int</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createIntArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createIntArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntArrayBlock(final String objectPath, final int[] data, final long blockNumber);
+
+    /**
+     * Writes out a block of an <code>int</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createIntArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeIntArrayBlock(String, int[], long)} if the total size
+     * of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createIntArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>)
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntArrayBlockWithOffset(final String objectPath, final int[] data,
+            final int dataSize, final long offset);
+
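+    // A sketch of block-wise writing with a trailing partial block (illustrative only; assumes an
+    // IHDF5Writer named 'writer', and the path and sizes are made up). Full blocks go through
+    // writeIntArrayBlock(); the 5-element remainder needs the WithOffset variant because it is
+    // shorter than the block size:
+    //
+    //   writer.createIntArray("/ds", 25L, 10);                      // 2 full blocks + remainder 5
+    //   int[] block = new int[10];
+    //   writer.writeIntArrayBlock("/ds", block, 0);                 // elements 0..9
+    //   writer.writeIntArrayBlock("/ds", block, 1);                 // elements 10..19
+    //   writer.writeIntArrayBlockWithOffset("/ds", block, 5, 20L);  // elements 20..24
+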
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeIntMatrix(final String objectPath, final int[][] data);
+
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMatrix(final String objectPath, final int[][] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2). The initial size of the matrix is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMatrix(final String objectPath, final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the int matrix to create.
+     * @param sizeY The size of the y dimension of the int matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the int matrix to create.
+     * @param sizeY The size of the y dimension of the int matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an <code>int</code> matrix (array of rank 2). The data set needs to have
+     * been created by
+     * {@link #createIntMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * If the total size of the data set is not a multiple of the block size, use
+     * {@link #writeIntMatrixBlockWithOffset(String, int[][], int, int, long, long)} instead.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createIntMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMatrixBlock(final String objectPath, final int[][] data,
+            final long blockNumberX, final long blockNumberY);
+
+    /**
+     * Writes out a block of an <code>int</code> matrix (array of rank 2). The data set needs to have
+     * been created by
+     * {@link #createIntMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeIntMatrixBlock(String, int[][], long, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the dimensions of <var>data</var> in this method should
+     * be chosen to be equal to the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createIntMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMatrixBlockWithOffset(final String objectPath, final int[][] data,
+            final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a block of an <code>int</code> matrix (array of rank 2). The data set needs to have
+     * been created by
+     * {@link #createIntMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeIntMatrixBlock(String, int[][], long, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSizeX/Y</var> in this method should
+     * be chosen to be equal to the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createIntMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>)
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>)
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMatrixBlockWithOffset(final String objectPath, final int[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY);
+
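+    // A sketch of writing the edge blocks of a matrix (illustrative only; assumes an IHDF5Writer
+    // named 'writer', and the path and sizes are made up). A 25x25 data set with 10x10 blocks has
+    // partial blocks along both edges:
+    //
+    //   writer.createIntMatrix("/m", 25L, 25L, 10, 10);
+    //   int[][] full = new int[10][10];
+    //   writer.writeIntMatrixBlock("/m", full, 0, 0);                      // rows 0..9, cols 0..9
+    //   writer.writeIntMatrixBlockWithOffset("/m", full, 5, 5, 20L, 20L);  // 5x5 corner at (20, 20)
+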
+    /**
+     * Writes out a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMDArray(final String objectPath, final MDIntArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMDArray(final String objectPath, final MDIntArray data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the int array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (extent
+     *            along each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array. Will be the total dimensions for
+     *            non-extendable data sets and the dimensions of one chunk for extendable (chunked)
+     *            data sets. For extendable data sets the initial size of the array along each axis
+     *            will be 0, see {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void createIntMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMDArrayBlock(final String objectPath, final MDIntArray data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMDArrayBlockWithOffset(final String objectPath, final MDIntArray data,
+            final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int32()}.
+     */
+    @Deprecated
+    public void writeIntMDArrayBlockWithOffset(final String objectPath, final MDIntArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
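+    // A sketch of multi-dimensional block writing (illustrative only; assumes an IHDF5Writer
+    // named 'writer', and the path and sizes are made up). The block number is given per
+    // dimension and multiplied with the block extent to obtain the offset:
+    //
+    //   writer.createIntMDArray("/md", new long[] { 20, 20, 20 }, new int[] { 10, 10, 10 });
+    //   MDIntArray block = new MDIntArray(new int[] { 10, 10, 10 });
+    //   writer.writeIntMDArrayBlock("/md", block, new long[] { 1, 0, 1 });  // offset (10, 0, 10)
+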
+    // *********************
+    // Long
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>long</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void setLongAttribute(final String objectPath, final String name, final long value);
+
+    /**
+     * Set a <code>long[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void setLongArrayAttribute(final String objectPath, final String name, final long[] value);
+
+    /**
+     * Set a multi-dimensional <code>long</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void setLongMDArrayAttribute(final String objectPath, final String name,
+            final MDLongArray value);
+
+    /**
+     * Set a <code>long[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void setLongMatrixAttribute(final String objectPath, final String name,
+            final long[][] value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>long</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeLong(final String objectPath, final long value);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeLongArray(final String objectPath, final long[] data);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongArray(final String objectPath, final long[] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongArray(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>long</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createLongArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createLongArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongArrayBlock(final String objectPath, final long[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>long</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createLongArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeLongArrayBlock(String, long[], long)} if the total
+     * size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createLongArray(String, long, int, HDF5IntStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>)
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset);
+
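+    // A sketch of appending to an extendable data set (illustrative only; assumes an IHDF5Writer
+    // named 'writer', and the path and sizes are made up). In the default (extendable) mode the
+    // array starts at size 0 and grows with every block written past its current end:
+    //
+    //   writer.createLongArray("/log", 0L, 1000);
+    //   for (long block = 0; block < 5; ++block)
+    //   {
+    //       writer.writeLongArrayBlock("/log", new long[1000], block);  // size becomes (block + 1) * 1000
+    //   }
+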
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeLongMatrix(final String objectPath, final long[][] data);
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMatrix(final String objectPath, final long[][] data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2). The initial size of the matrix is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMatrix(final String objectPath, final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the long matrix to create.
+     * @param sizeY The size of the y dimension of the long matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the long matrix to create.
+     * @param sizeY The size of the y dimension of the long matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createLongMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * If the total size of the data set is not a multiple of the block size, use
+     * {@link #writeLongMatrixBlockWithOffset(String, long[][], int, int, long, long)} instead.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createLongMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMatrixBlock(final String objectPath, final long[][] data,
+            final long blockNumberX, final long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createLongMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeLongMatrixBlock(String, long[][], long, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the dimensions of <var>data</var> in this method should
+     * be chosen to be equal to the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createLongMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMatrixBlockWithOffset(final String objectPath, final long[][] data,
+            final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createLongMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeLongMatrixBlock(String, long[][], long, long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSizeX/Y</var> in this method should
+     * be chosen to be equal to the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createLongMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>)
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>)
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMatrixBlockWithOffset(final String objectPath, final long[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMDArray(final String objectPath, final MDLongArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMDArray(final String objectPath, final MDLongArray data,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the long array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (extent
+     *            along each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array. Will be the total dimensions for
+     *            non-extendable data sets and the dimensions of one chunk for extendable (chunked)
+     *            data sets. For extendable data sets the initial size of the array along each axis
+     *            will be 0, see {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void createLongMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMDArrayBlock(final String objectPath, final MDLongArray data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#int64()}.
+     */
+    @Deprecated
+    public void writeLongMDArrayBlockWithOffset(final String objectPath, final MDLongArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
+    // *********************
+    // Float
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>float</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void setFloatAttribute(final String objectPath, final String name, final float value);
+
+    /**
+     * Set a <code>float[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void setFloatArrayAttribute(final String objectPath, final String name,
+            final float[] value);
+
+    /**
+     * Set a multi-dimensional <code>float</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void setFloatMDArrayAttribute(final String objectPath, final String name,
+            final MDFloatArray value);
+
+    /**
+     * Set a <code>float[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void setFloatMatrixAttribute(final String objectPath, final String name,
+            final float[][] value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>float</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeFloat(final String objectPath, final float value);
+
+    /**
+     * Writes out a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeFloatArray(final String objectPath, final float[] data);
+
+    /**
+     * Writes out a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatArray(final String objectPath, final float[] data,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the float array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatArray(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the float array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the float array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5FloatStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatArray(final String objectPath, final int size,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the float array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is
+     *            <code>HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatArray(final String objectPath, final long size, final int blockSize,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>float</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createFloatArray(String, long, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createFloatArray(String, long, int, HDF5FloatStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatArrayBlock(final String objectPath, final float[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>float</code> array (of rank 1). The data set needs to have been
+     * created by {@link #createFloatArray(String, long, int, HDF5FloatStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeFloatArrayBlock(String, float[], long)} if the total
+     * size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createFloatArray(String, long, int, HDF5FloatStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatArrayBlockWithOffset(final String objectPath, final float[] data,
+            final int dataSize, final long offset);
+
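+    /*
+     * A sketch of block-wise writing with the methods above; the path "/blocked" and the block
+     * buffers are illustrative. A data set of total size 2500 is written in chunks of 1000, the
+     * final partial chunk via the offset variant:
+     *
+     *   writer.createFloatArray("/blocked", 2500L, 1000);
+     *   writer.writeFloatArrayBlock("/blocked", block0, 0L);                    // [0, 1000)
+     *   writer.writeFloatArrayBlock("/blocked", block1, 1L);                    // [1000, 2000)
+     *   writer.writeFloatArrayBlockWithOffset("/blocked", block2, 500, 2000L);  // [2000, 2500)
+     *
+     * Here block0, block1 and block2 are float[1000] buffers; only the first 500 elements of
+     * block2 are written.
+     */
+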
+    /**
+     * Writes out a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeFloatMatrix(final String objectPath, final float[][] data);
+
+    /**
+     * Writes out a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMatrix(final String objectPath, final float[][] data,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2). The initial size of the matrix is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMatrix(final String objectPath, final int blockSizeX,
+            final int blockSizeY);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the float matrix to create.
+     * @param sizeY The size of the y dimension of the float matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the float matrix to create.
+     * @param sizeY The size of the y dimension of the float matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>float</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createFloatMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use {@link #writeFloatMatrixBlockWithOffset(String, float[][], long, long)} instead of this
+     * method if the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createFloatMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMatrixBlock(final String objectPath, final float[][] data,
+            final long blockNumberX, final long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>float</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createFloatMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeFloatMatrixBlock(String, float[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the dimensions of <var>data</var> in this method should
+     * match the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createFloatMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMatrixBlockWithOffset(final String objectPath, final float[][] data,
+            final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a block of a <code>float</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createFloatMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeFloatMatrixBlock(String, float[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSizeX/Y</var> in this method should
+     * be chosen to be equal to the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createFloatMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code>&lt;= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMatrixBlockWithOffset(final String objectPath, final float[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY);
+
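+    /*
+     * A sketch of block-wise matrix writing; "/matrix" and the loop bounds are illustrative. A
+     * 6x4 matrix is created with 3x2 blocks and filled block by block:
+     *
+     *   writer.createFloatMatrix("/matrix", 6L, 4L, 3, 2);
+     *   final float[][] block = new float[3][2];
+     *   for (long bx = 0; bx < 2; ++bx)
+     *   {
+     *       for (long by = 0; by < 2; ++by)
+     *       {
+     *           // ... fill block ...
+     *           writer.writeFloatMatrixBlock("/matrix", block, bx, by);
+     *       }
+     *   }
+     *
+     * If the matrix size were not a multiple of the block size, the trailing edges would be
+     * written with writeFloatMatrixBlockWithOffset() instead.
+     */
+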
+    /**
+     * Writes out a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMDArray(final String objectPath, final MDFloatArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMDArray(final String objectPath, final MDFloatArray data,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the float array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (extent
+     *            along each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array. Will be the total dimensions for
+     *            non-extendable data sets and the dimensions of one chunk for extendable (chunked)
+     *            data sets. For extendable data sets the initial size of the array along each axis
+     *            will be 0, see {@link HDF5FloatStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMDArray(final String objectPath, final int[] dimensions,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void createFloatMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMDArrayBlock(final String objectPath, final MDFloatArray data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMDArrayBlockWithOffset(final String objectPath, final MDFloatArray data,
+            final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>float</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float32()}.
+     */
+    @Deprecated
+    public void writeFloatMDArrayBlockWithOffset(final String objectPath, final MDFloatArray data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset);
+
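+    /*
+     * A sketch of multi-dimensional block writing; "/cube" is illustrative. An 8x8x8 data set
+     * with 4x4x4 chunks is created and the chunk at block index (1, 0, 1) is written:
+     *
+     *   writer.createFloatMDArray("/cube", new long[] { 8L, 8L, 8L }, new int[] { 4, 4, 4 });
+     *   final MDFloatArray chunk = new MDFloatArray(new int[] { 4, 4, 4 });
+     *   // ... fill chunk ...
+     *   writer.writeFloatMDArrayBlock("/cube", chunk, new long[] { 1L, 0L, 1L });
+     */
+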
+    // *********************
+    // Double
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>double</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void setDoubleAttribute(final String objectPath, final String name, final double value);
+
+    /**
+     * Set a <code>double[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void setDoubleArrayAttribute(final String objectPath, final String name,
+            final double[] value);
+
+    /**
+     * Set a multi-dimensional <code>double</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void setDoubleMDArrayAttribute(final String objectPath, final String name,
+            final MDDoubleArray value);
+
+    /**
+     * Set a <code>double[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void setDoubleMatrixAttribute(final String objectPath, final String name,
+            final double[][] value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>double</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeDouble(final String objectPath, final double value);
+
+    /**
+     * Writes out a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeDoubleArray(final String objectPath, final double[] data);
+
+    /**
+     * Writes out a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleArray(final String objectPath, final double[] data,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the double array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleArray(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the double array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the double array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5FloatStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleArray(final String objectPath, final int size,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the double array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is
+     *            <code>HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleArray(final String objectPath, final long size, final int blockSize,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>double</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createDoubleArray(String, long, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createDoubleArray(String, long, int, HDF5FloatStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleArrayBlock(final String objectPath, final double[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>double</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createDoubleArray(String, long, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeDoubleArrayBlock(String, double[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createDoubleArray(String, long, int, HDF5FloatStorageFeatures)} call that was used to
+     * create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleArrayBlockWithOffset(final String objectPath, final double[] data,
+            final int dataSize, final long offset);
+
+    /**
+     * Writes out a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeDoubleMatrix(final String objectPath, final double[][] data);
+
+    /**
+     * Writes out a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMatrix(final String objectPath, final double[][] data,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2). The initial size of the matrix is 0.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMatrix(final String objectPath, final int blockSizeX,
+            final int blockSizeY);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the double matrix to create.
+     * @param sizeY The size of the y dimension of the double matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY);
+
+    /**
+     * Creates a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the double matrix to create.
+     * @param sizeY The size of the y dimension of the double matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>double</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createDoubleMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use {@link #writeDoubleMatrixBlockWithOffset(String, double[][], long, long)} instead of
+     * this method if the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the size of <var>data</var> in this method should match
+     * the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createDoubleMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMatrixBlock(final String objectPath, final double[][] data,
+            final long blockNumberX, final long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>double</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createDoubleMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeDoubleMatrixBlock(String, double[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the dimensions of <var>data</var> in this method should
+     * match the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createDoubleMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMatrixBlockWithOffset(final String objectPath, final double[][] data,
+            final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a block of a <code>double</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createDoubleMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeDoubleMatrixBlock(String, double[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSizeX/Y</var> in this method should
+     * be chosen to be equal to the <var>blockSizeX/Y</var> arguments of the
+     * {@link #createDoubleMatrix(String, long, long, int, int, HDF5FloatStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code>&lt;= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMatrixBlockWithOffset(final String objectPath, final double[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeDoubleMDArray(final String objectPath, final MDDoubleArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMDArray(final String objectPath, final MDDoubleArray data,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the double array to create. This will be the total
+     *            dimensions for non-extendable data sets and the dimensions of one chunk (extent
+     *            along each axis) for extendable (chunked) data sets. For extendable data sets the
+     *            initial size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array. Will be the total dimensions for
+     *            non-extendable data sets and the dimensions of one chunk for extendable (chunked)
+     *            data sets. For extendable data sets the initial size of the array along each axis
+     *            will be 0, see {@link HDF5FloatStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMDArray(final String objectPath, final int[] dimensions,
+            final HDF5FloatStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void createDoubleMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5FloatStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMDArrayBlock(final String objectPath, final MDDoubleArray data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMDArrayBlockWithOffset(final String objectPath,
+            final MDDoubleArray data, final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>double</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#float64()}.
+     */
+    @Deprecated
+    public void writeDoubleMDArrayBlockWithOffset(final String objectPath,
+            final MDDoubleArray data, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset);
+
+    // *********************
+    // String
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets a string attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringAttribute(final String objectPath, final String name, final String value);
+
+    /**
+     * Sets a string attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @param maxLength The maximal length of the value.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringAttribute(final String objectPath, final String name, final String value,
+            final int maxLength);
+
+    /**
+     * Sets a string array attribute on the referenced object. The maximal length of the strings
+     * is taken to be the length of the longest string in <var>value</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringArrayAttribute(final String objectPath, final String name,
+            final String[] value);
+
+    /**
+     * Sets a string array attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @param maxLength The maximal length of any element in <var>value</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringArrayAttribute(final String objectPath, final String name,
+            final String[] value, final int maxLength);
+
+    /**
+     * Sets a multi-dimensional string array attribute on the referenced object. The maximal
+     * length of the strings is taken to be the length of the longest string in <var>value</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringMDArrayAttribute(final String objectPath, final String name,
+            final MDArray<String> value);
+
+    /**
+     * Sets a multi-dimensional string array attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @param maxLength The maximal length of the value.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringMDArrayAttribute(final String objectPath, final String name,
+            final MDArray<String> value, final int maxLength);
+
+    /**
+     * Sets a string attribute with variable length on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void setStringAttributeVariableLength(final String objectPath, final String name,
+            final String value);
+
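+    /*
+     * A sketch of the string attribute setters; "/notes" and the attribute names are
+     * illustrative:
+     *
+     *   writer.writeString("/notes", "hello");
+     *   writer.setStringAttribute("/notes", "author", "jane");        // fixed length 4
+     *   writer.setStringAttribute("/notes", "comment", "short", 64);  // fixed length 64
+     *   writer.setStringAttributeVariableLength("/notes", "free", "any length");
+     */
+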
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of the <var>data</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeString(final String objectPath, final String data, final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length (which is the length of the
+     * string <var>data</var>).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeString(final String objectPath, final String data);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeString(final String objectPath, final String data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of the <var>data</var>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeString(final String objectPath, final String data, final int maxLength,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringArray(final String objectPath, final String[] data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is defined by the longest string in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeStringArray(final String objectPath, final String[] data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringArray(final String objectPath, final String[] data, final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringArray(final String objectPath, final String[] data, final int maxLength,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is defined by the longest string in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringMDArray(final String objectPath, final MDArray<String> data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is defined by the longest string in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringMDArray(final String objectPath, final MDArray<String> data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength, final HDF5GenericStorageFeatures features);
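+
+    /*
+     * A sketch (same assumptions as above) of writing a rank-2 string array. The
+     * MDArray(String[], int[]) constructor from ch.systemsx.cisd.base.mdarray is
+     * an assumption here: the flat element array is reshaped to the given
+     * dimensions in row-major order.
+     *
+     *   MDArray<String> md = new MDArray<String>(
+     *           new String[] { "a", "b", "c", "d" }, new int[] { 2, 2 });
+     *   writer.writeStringMDArray("/md", md, 8,
+     *           HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION);
+     */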
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the String array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringArray(final String objectPath, final int maxLength, final int size);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the String array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringArray(final String objectPath, final int maxLength, final int size,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the String array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank 1). The data set needs to have
+     * been created by
+     * {@link #createStringArray(String, int, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createStringArray(String, int, long, int, HDF5GenericStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringArrayBlock(final String objectPath, final String[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank 1). The data set needs to have
+     * been created by
+     * {@link #createStringArray(String, int, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeStringArrayBlock(String, String[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createStringArray(String, int, long, int, HDF5GenericStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringArrayBlockWithOffset(final String objectPath, final String[] data,
+            final int dataSize, final long offset);
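+
+    /*
+     * A block-wise writing sketch (same assumptions; GENERIC_CHUNKED is an assumed
+     * constant): create an extendable data set with initial size 0, write full
+     * blocks by block number, then the partial tail block with an explicit offset.
+     *
+     *   writer.createStringArray("/blocked", 16, 0L, 4,
+     *           HDF5GenericStorageFeatures.GENERIC_CHUNKED);
+     *   writer.writeStringArrayBlock("/blocked",
+     *           new String[] { "a", "b", "c", "d" }, 0L);          // elements 0..3
+     *   writer.writeStringArrayBlockWithOffset("/blocked",
+     *           new String[] { "e", "f" }, 2, 4L);                 // elements 4..5
+     */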
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The dimensions of the String array to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions);
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The dimensions of the String array to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block in each dimension (for block-wise IO). Ignored if no
+     *            extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize);
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The dimensions of the String array to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The dimensions of the String array to create. When using extendable data
+     *            sets (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *            set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block in each dimension (for block-wise IO). Ignored if no
+     *            extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank N). The data set needs to have
+     * been created by
+     * {@link #createStringMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createStringMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringMDArrayBlock(final String objectPath, final MDArray<String> data,
+            final long[] blockNumber);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank N). The data set needs to have
+     * been created by
+     * {@link #createStringMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createStringMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringMDArrayBlockWithOffset(final String objectPath,
+            final MDArray<String> data, final long[] offset);
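+
+    /*
+     * A multi-dimensional block-wise sketch (same assumptions as the rank-1
+     * example): the data set is created with a per-dimension block size, and each
+     * block is then addressed by a block-number vector.
+     *
+     *   writer.createStringMDArray("/md-blocked", 8,
+     *           new long[] { 0, 0 }, new int[] { 2, 2 },
+     *           HDF5GenericStorageFeatures.GENERIC_CHUNKED);
+     *   MDArray<String> block = new MDArray<String>(
+     *           new String[] { "a", "b", "c", "d" }, new int[] { 2, 2 });
+     *   writer.writeStringMDArrayBlock("/md-blocked", block, new long[] { 0, 0 });
+     */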
+
+    /**
+     * Writes out a <code>String</code> with variable maximal length.
+     * <p>
+     * The advantage of this method over {@link #writeString(String, String)} is that when writing a
+     * new string later it can have a different (also greater) length. The disadvantage is that
+     * it is more time-consuming to read and write this kind of string and that it cannot be
+     * compressed.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringVariableLength(final String objectPath, final String data);
+
+    /**
+     * Writes out a <code>String[]</code> where each String of the array has a variable maximal
+     * length.
+     * <p>
+     * The advantage of this method over {@link #writeStringArray(String, String[])} is that when
+     * writing a new string later it can have a different (also greater) length. The disadvantage is
+     * that it is more time-consuming to read and write this kind of string and that it cannot be
+     * compressed.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringVariableLengthArray(final String objectPath, final String[] data);
+
+    /**
+     * Writes out a <code>String[]</code> where each String of the array has a variable maximal
+     * length.
+     * <p>
+     * The advantage of this method over {@link #writeStringArray(String, String[])} is that when
+     * writing a new string later it can have a different (also greater) length. The disadvantage is
+     * that it is more time-consuming to read and write this kind of string and that it cannot be
+     * compressed.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringVariableLengthArray(final String objectPath, final String[] data,
+            final HDF5GenericStorageFeatures features);
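+
+    /*
+     * A sketch (same assumptions) contrasting variable-length strings with the
+     * fixed-length variants above: a later rewrite may be longer than the first
+     * value, at the cost of slower I/O and no compression.
+     *
+     *   writer.writeStringVariableLength("/vlen", "short");
+     *   writer.writeStringVariableLength("/vlen", "a much longer replacement");
+     *   writer.writeStringVariableLengthArray("/vlen-array",
+     *           new String[] { "one", "twenty-two" });
+     */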
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the String array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthArray(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The initial size of the array.
+     * @param blockSize The size of block in the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthArray(final String objectPath, final long size,
+            final int blockSize);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The initial size of the array.
+     * @param blockSize The size of block in the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthArray(final String objectPath, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the String array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthArray(final String objectPath, final int size,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthMDArray(final String objectPath, final int[] dimensions,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @param blockSize The size of a contiguously stored block (along each axis) in the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @param blockSize The size of a contiguously stored block (along each axis) in the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void createStringVariableLengthMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockSize);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringVariableLengthMDArray(final String objectPath, final MDArray<String> data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#string()} instead.
+     */
+    @Deprecated
+    public void writeStringVariableLengthMDArray(final String objectPath,
+            final MDArray<String> data, final HDF5GenericStorageFeatures features);
+
+    // *********************
+    // Date & Time
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets a date value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param date The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setDateAttribute(final String objectPath, final String attributeName,
+            final Date date);
+
+    /**
+     * Sets a date array value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param dates The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setDateArrayAttribute(final String objectPath, final String attributeName,
+            final Date[] dates);
+
+    /**
+     * Sets a time stamp value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeStamp The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setTimeStampAttribute(final String objectPath, final String attributeName,
+            final long timeStamp);
+
+    /**
+     * Sets a time stamp array value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeStamps The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setTimeStampArrayAttribute(final String objectPath, final String attributeName,
+            final long[] timeStamps);
+
+    /**
+     * Sets a time duration value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDuration The value of the attribute.
+     * @param timeUnit The unit of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setTimeDurationAttribute(final String objectPath, final String attributeName,
+            final long timeDuration, final HDF5TimeUnit timeUnit);
+
+    /**
+     * Sets a time duration value as an attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDuration The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setTimeDurationAttribute(final String objectPath, final String attributeName,
+            final HDF5TimeDuration timeDuration);
+
+    /**
+     * Sets a time duration array value as an attribute on the referenced object. The smallest
+     * time unit in <var>timeDurations</var> will be used as the time unit of the array.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * <p>
+     * <em>Note: Time durations are stored as a <code>long[]</code> array.</em>
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDurations The value of the attribute.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void setTimeDurationArrayAttribute(final String objectPath, final String attributeName,
+            final HDF5TimeDurationArray timeDurations);
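+
+    /*
+     * An attribute sketch for the date/time setters above (same assumptions; the
+     * data set "/ds" must already exist). HDF5TimeDurationArray.create() is taken
+     * from deprecation notes elsewhere in this interface; HDF5TimeUnit.SECONDS is
+     * an assumed enum constant.
+     *
+     *   writer.setDateAttribute("/ds", "created", new Date());
+     *   writer.setTimeDurationAttribute("/ds", "timeout", 30, HDF5TimeUnit.SECONDS);
+     *   writer.setTimeDurationArrayAttribute("/ds", "laps",
+     *           HDF5TimeDurationArray.create(HDF5TimeUnit.SECONDS, 58, 61, 59));
+     */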
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a time stamp value. The data set will be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeStamp The timestamp to write, as the number of milliseconds since January 1,
+     *            1970, 00:00:00 GMT.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeStamp(final String objectPath, final long timeStamp);
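+
+    /*
+     * A scalar time stamp sketch (same assumptions): the value is a plain epoch
+     * millisecond count, so System.currentTimeMillis() can be written directly.
+     *
+     *   writer.writeTimeStamp("/now", System.currentTimeMillis());
+     */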
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The length of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeStampArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeStampArray(final String objectPath, final int size);
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The length of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeStampArray(final String objectPath, final long size, final int blockSize,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a time stamp array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeStampArray(final String objectPath, final int size,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a time stamp array (of rank 1). The data set will be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeStamps The timestamps to write, as numbers of milliseconds since January 1,
+     *            1970, 00:00:00 GMT.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeStampArray(final String objectPath, final long[] timeStamps);
+
+    /**
+     * Writes out a time stamp array (of rank 1). The data set will be tagged as type variant
+     * {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeStamps The timestamps to write, as numbers of milliseconds since January 1,
+     *            1970, 00:00:00 GMT.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeStampArray(final String objectPath, final long[] timeStamps,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a time stamp array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createTimeStampArray(String, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link IHDF5LongWriter#createArray(String, long, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeStampArrayBlock(final String objectPath, final long[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a time stamp array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createTimeStampArray(String, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeTimeStampArrayBlock(String, long[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link IHDF5LongWriter#createArray(String, long, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeStampArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset);
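+
+    /*
+     * A block-wise time stamp sketch (same assumptions as the string example):
+     * create with block size 2, write one full block, then a one-element tail at
+     * offset 2.
+     *
+     *   writer.createTimeStampArray("/stamps", 0L, 2,
+     *           HDF5GenericStorageFeatures.GENERIC_CHUNKED);
+     *   writer.writeTimeStampArrayBlock("/stamps", new long[] { 1000L, 2000L }, 0L);
+     *   writer.writeTimeStampArrayBlockWithOffset("/stamps", new long[] { 3000L }, 1, 2L);
+     */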
+
+    /**
+     * Writes out a time stamp value provided as a {@link Date}.
+     * <p>
+     * <em>Note: The time stamp is stored as a <code>long</code> value and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param date The date to write.
+     * @see #writeTimeStamp(String, long)
+     */
+    public void writeDate(final String objectPath, final Date date);
+
+    /**
+     * Writes out a {@link Date} array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dates The dates to write.
+     * @see #writeTimeStampArray(String, long[])
+     */
+    public void writeDateArray(final String objectPath, final Date[] dates);
+
+    /**
+     * Writes out a {@link Date} array (of rank 1).
+     * <p>
+     * <em>Note: Time stamps are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dates The dates to write.
+     * @param features The storage features of the data set.
+     * @see #writeTimeStampArray(String, long[], HDF5GenericStorageFeatures)
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeDateArray(final String objectPath, final Date[] dates,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a time duration value in seconds.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long</code> values and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDuration The duration of time to write in seconds.
+     * @deprecated Use {@link IHDF5TimeDurationWriter#write(String, HDF5TimeDuration)} instead.
+     */
+    @Deprecated
+    public void writeTimeDuration(final String objectPath, final long timeDuration);
+
+    /**
+     * Writes out a time duration value.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long</code> values and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDuration The duration of time to write in the given <var>timeUnit</var>.
+     * @param timeUnit The unit of the time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeDuration(final String objectPath, final long timeDuration,
+            final HDF5TimeUnit timeUnit);
+
+    /**
+     * Writes out a time duration value.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long</code> values and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDuration The duration of time to write.
+     */
+    public void writeTimeDuration(final String objectPath, final HDF5TimeDuration timeDuration);
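+
+    /*
+     * A scalar time duration sketch (same assumptions). The
+     * HDF5TimeDuration(long, HDF5TimeUnit) constructor and HDF5TimeUnit.MINUTES
+     * are assumptions based on how these types are used throughout this interface.
+     *
+     *   writer.writeTimeDuration("/duration",
+     *           new HDF5TimeDuration(90, HDF5TimeUnit.MINUTES));
+     */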
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @param timeUnit The unit of the time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeDurationArray(final String objectPath, final int size,
+            final HDF5TimeUnit timeUnit);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @param timeUnit The unit of the time duration.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeDurationArray(final String objectPath, final long size,
+            final int blockSize, final HDF5TimeUnit timeUnit);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>deflate == false</code>.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeDurationArray(final String objectPath, final long size,
+            final int blockSize, final HDF5TimeUnit timeUnit,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void createTimeDurationArray(final String objectPath, final int size,
+            final HDF5TimeUnit timeUnit, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a time duration array in seconds (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write in seconds.
+     * @deprecated Use {@link IHDF5TimeDurationWriter#writeArray(String, HDF5TimeDurationArray)}
+     *             instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArray(final String objectPath, final long[] timeDurations);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write in the given <var>timeUnit</var>.
+     * @param timeUnit The unit of the time duration.
+     * @deprecated Use {@link IHDF5TimeDurationWriter#writeArray(String, HDF5TimeDurationArray)}
+     *             instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArray(final String objectPath, final long[] timeDurations,
+            final HDF5TimeUnit timeUnit);
+
+    /**
+     * Writes out a time duration array (of rank 1). The smallest time unit in
+     * <var>timeDurations</var> will be used as the time unit of the array.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write. The array will be stored in the smallest
+     *            time unit; durations given in larger time units will be converted.
+     * @deprecated Use {@link IHDF5TimeDurationWriter#writeArray(String, HDF5TimeDurationArray)} and
+     *             {@link HDF5TimeDurationArray#create(HDF5TimeDuration...)} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDuration[] timeDurations);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write in the given <var>timeUnit</var>.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     * @deprecated Use
+     *             {@link IHDF5TimeDurationWriter#writeArray(String, HDF5TimeDurationArray, HDF5IntStorageFeatures)}
+     *             instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArray(final String objectPath, final long[] timeDurations,
+            final HDF5TimeUnit timeUnit, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write. The array will be stored in the smallest
+     *            time unit; durations given in larger time units will be converted.
+     * @param features The storage features of the data set.
+     * @deprecated Use
+     *             {@link IHDF5TimeDurationWriter#writeArray(String, HDF5TimeDurationArray, HDF5IntStorageFeatures)}
+     *             and {@link HDF5TimeDurationArray#create(HDF5TimeDuration...)} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDuration[] timeDurations, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write.
+     */
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDurationArray timeDurations);
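+
+    /*
+     * A duration-array sketch using the non-deprecated overload above together
+     * with the HDF5TimeDurationArray.create(HDF5TimeUnit, long...) factory named
+     * in the deprecation notes (same assumptions otherwise):
+     *
+     *   writer.writeTimeDurationArray("/durations",
+     *           HDF5TimeDurationArray.create(HDF5TimeUnit.SECONDS, 10, 20, 30));
+     */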
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write.
+     * @param features The storage features used to store the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDurationArray timeDurations, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a time duration array. The data set needs to have been created by
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * call that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArrayBlock(final String objectPath,
+            final HDF5TimeDurationArray data, final long blockNumber);
+
+    /**
+     * Writes out a block of a time duration array. The data set needs to have been created by
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #writeTimeDurationArrayBlock(String, HDF5TimeDurationArray, long)} if the total size of
+     * the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * call that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#time()} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDurationArray data, final int dataSize, final long offset);
+
+    /**
+     * Writes out a block of a time duration array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * call that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use
+     *             {@link IHDF5TimeDurationWriter#writeArrayBlock(String, HDF5TimeDurationArray, long)}
+     *             and {@link HDF5TimeDurationArray#create(HDF5TimeUnit, long...)} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArrayBlock(final String objectPath, final long[] data,
+            final long blockNumber, final HDF5TimeUnit timeUnit);
+
+    /**
+     * Writes out a block of a time duration array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #writeTimeDurationArrayBlock(String, long[], long, HDF5TimeUnit)} if the total size of
+     * the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * call that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code>&lt;= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use
+     *             {@link IHDF5TimeDurationWriter#writeArrayBlockWithOffset(String, HDF5TimeDurationArray, int, long)}
+     *             and {@link HDF5TimeDurationArray#create(HDF5TimeUnit, long[])} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArrayBlockWithOffset(final String objectPath, final long[] data,
+            final int dataSize, final long offset, final HDF5TimeUnit timeUnit);
+
+    /**
+     * Writes out a block of a time duration array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * call that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use
+     *             {@link IHDF5TimeDurationWriter#writeArrayBlock(String, HDF5TimeDurationArray, long)}
+     *             and {@link HDF5TimeDurationArray#create(HDF5TimeDuration[])} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArrayBlock(final String objectPath, final HDF5TimeDuration[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a time duration array (which is stored as a <code>long</code> array of
+     * rank 1). The data set needs to have been created by
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #writeTimeDurationArrayBlock(String, HDF5TimeDuration[], long)} if the total size of
+     * the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createTimeDurationArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)}
+     * call that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use
+     *             {@link IHDF5TimeDurationWriter#writeArrayBlockWithOffset(String, HDF5TimeDurationArray, int, long)}
+     *             and {@link HDF5TimeDurationArray#create(HDF5TimeDuration[])} instead.
+     */
+    @Deprecated
+    public void writeTimeDurationArrayBlockWithOffset(final String objectPath,
+            final HDF5TimeDuration[] data, final int dataSize, final long offset);
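+
+    // A hedged migration sketch (not part of this interface): per the @deprecated notes
+    // above, new code should use the typed time-duration writer. The data set path
+    // "/durations", the writer variable, and the assumption that IHDF5Writer exposes the
+    // time-duration writer via duration() are illustrative only.
+    //
+    //   HDF5TimeDurationArray block =
+    //           HDF5TimeDurationArray.create(HDF5TimeUnit.SECONDS, new long[] { 1L, 2L, 3L });
+    //   writer.duration().writeArrayBlockWithOffset("/durations", block, 3, 0L);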
+
+    // *********************
+    // Reference
+    // *********************
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets an object reference attribute to the referenced object.
+     * <p>
+     * Both the object referenced by <var>objectPath</var> and the object at
+     * <var>referencedObjectPath</var> must exist, that is, they need to have been written before
+     * by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param referencedObjectPath The path of the object to reference.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void setObjectReferenceAttribute(final String objectPath, final String name,
+            final String referencedObjectPath);
+
+    /**
+     * Sets a 1D object reference array attribute to referenced objects.
+     * <p>
+     * Both the object referenced by <var>objectPath</var> and all
+     * <var>referencedObjectPaths</var> must exist, that is, they need to have been written before
+     * by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param referencedObjectPaths The paths of the objects to reference.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void setObjectReferenceArrayAttribute(final String objectPath, final String name,
+            final String[] referencedObjectPaths);
+
+    /**
+     * Sets a multi-dimensional object reference array attribute to referenced objects.
+     * <p>
+     * Both the object referenced by <var>objectPath</var> and all
+     * <var>referencedObjectPaths</var> must exist, that is, they need to have been written before
+     * by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param referencedObjectPaths The paths of the objects to reference.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void setObjectReferenceMDArrayAttribute(final String objectPath, final String name,
+            final MDArray<String> referencedObjectPaths);
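+
+    // A minimal usage sketch for the attribute setters above, assuming an instance
+    // "writer" of this interface and illustrative paths; both objects must already
+    // exist in the file. New code should call the corresponding methods on
+    // IHDF5Writer#reference() instead.
+    //
+    //   writer.setObjectReferenceAttribute("/data", "source", "/raw/data");
+    //   writer.setObjectReferenceArrayAttribute("/data", "sources",
+    //           new String[] { "/raw/a", "/raw/b" });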
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes an object reference to the referenced object.
+     * <p>
+     * The object referenced by <var>referencedObjectPath</var> must exist, that is, it needs to
+     * have been written before by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to write.
+     * @param referencedObjectPath The path of the object to reference.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReference(String objectPath, String referencedObjectPath);
+
+    /**
+     * Writes an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPath The paths of the referenced objects to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceArray(final String objectPath,
+            final String[] referencedObjectPath);
+
+    /**
+     * Writes an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPath The paths of the referenced objects to write.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceArray(final String objectPath,
+            final String[] referencedObjectPath, final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceArray(final String objectPath, final int size);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; data sets may, however, be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceArray(final String objectPath, final long size,
+            final int blockSize);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; data sets may, however, be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceArray(final String objectPath, final long size,
+            final int blockSize, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an array (of rank 1) of object references. The data set needs to have
+     * been created by
+     * {@link #createObjectReferenceArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createObjectReferenceArray(String, long, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the referenced objects to write. The length defines
+     *            the block size. Must not be <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceArrayBlock(final String objectPath,
+            final String[] referencedObjectPaths, final long blockNumber);
+
+    /**
+     * Writes out a block of an array (of rank 1) of object references. The data set needs to have
+     * been created by
+     * {@link #createObjectReferenceArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createObjectReferenceArray(String, long, int, HDF5IntStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the referenced objects to write. The length defines
+     *            the block size. Must not be <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>referencedObjectPaths</code> (needs to be
+     *            <code><= referencedObjectPaths.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceArrayBlockWithOffset(final String objectPath,
+            final String[] referencedObjectPaths, final int dataSize, final long offset);
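+
+    // A block-wise sketch for the reference array methods above (path, chunk size, and
+    // contents are illustrative assumptions): create an extendable data set with chunk
+    // size 4, write one full block, then a trailing partial block of 2 elements.
+    //
+    //   writer.createObjectReferenceArray("/refs", 0L, 4);
+    //   writer.writeObjectReferenceArrayBlock("/refs",
+    //           new String[] { "/a", "/b", "/c", "/d" }, 0L);
+    //   writer.writeObjectReferenceArrayBlockWithOffset("/refs",
+    //           new String[] { "/e", "/f" }, 2, 4L);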
+
+    /**
+     * Writes an array (of rank N) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the referenced objects to write.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceMDArray(final String objectPath,
+            final MDArray<String> referencedObjectPaths);
+
+    /**
+     * Writes an array (of rank N) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the referenced objects to write.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceMDArray(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array to create. This will be the total dimensions
+     *            for non-extendable data sets and the dimensions of one chunk (extent along each
+     *            axis) for extendable (chunked) data sets. For extendable data sets the initial
+     *            size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array to create. This will be the total dimensions
+     *            for non-extendable data sets and the dimensions of one chunk (extent along each
+     *            axis) for extendable (chunked) data sets. For extendable data sets the initial
+     *            size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void createObjectReferenceMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the object references to write. Must not be
+     *            <code>null</code>.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceMDArrayBlock(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the object references to write. Must not be
+     *            <code>null</code>.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceMDArrayBlockWithOffset(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the object references to write. Must not be
+     *            <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     * @deprecated Use the corresponding method in {@link IHDF5Writer#reference()}.
+     */
+    @Deprecated
+    public void writeObjectReferenceMDArrayBlockWithOffset(final String objectPath,
+            final MDLongArray referencedObjectPaths, final int[] blockDimensions,
+            final long[] offset, final int[] memoryOffset);
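+
+    // A multi-dimensional sketch (illustrative; assumes the MDArray(T[], int[])
+    // constructor from ch.systemsx.cisd.base.mdarray): write a 2x2 block of references
+    // at block number (0, 1) of a chunked 2D reference array created beforehand.
+    //
+    //   MDArray<String> refs = new MDArray<String>(
+    //           new String[] { "/a", "/b", "/c", "/d" }, new int[] { 2, 2 });
+    //   writer.writeObjectReferenceMDArrayBlock("/refs2d", refs, new long[] { 0L, 1L });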
+
+    // *********************
+    // Enums
+    // *********************
+
+    /**
+     * Returns the full writer for enums.
+     * 
+     * @deprecated Use {@link IHDF5Writer#enumeration()} instead.
+     */
+    @Deprecated
+    public IHDF5EnumWriter enums();
+
+    // *********************
+    // Compounds
+    // *********************
+
+    /**
+     * Returns the full writer for compounds.
+     * 
+     * @deprecated Use {@link IHDF5Writer#compound()} instead.
+     */
+    @Deprecated
+    public IHDF5CompoundWriter compounds();
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5LongReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5LongReader.java
new file mode 100644
index 0000000..a13da63
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5LongReader.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * An interface that provides methods for reading <code>long</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
+ * <p>
+ * <i>Note:</i> If the values read are unsigned, use the methods in {@link UnsignedIntUtils} to
+ * convert to a larger Java integer type that can hold all values as unsigned.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5LongReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>long</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public long getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a <code>long[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public long[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional array <code>long</code> attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MDLongArray getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads a <code>long</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public long[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
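+
+    // A usage sketch for the attribute readers above, assuming an open IHDF5Reader
+    // named "reader" that exposes this interface (e.g. via int64()); the path and
+    // attribute names are illustrative only.
+    //
+    //   long count = reader.int64().getAttr("/dataset", "count");
+    //   long[] shape = reader.int64().getArrayAttr("/dataset", "shape");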
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>long</code> value from the data set <var>objectPath</var>. This method 
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public long read(String objectPath);
+
+    /**
+     * Reads a <code>long</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public long[] readArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath,
+            MDLongArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>long</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MDLongArray array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>long</code> array (of rank 1) from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public long[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
+
+    /**
+     * Reads a block from <code>long</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public long[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
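+
+    // A sketch of the two block addressing modes above (same illustrative "reader"
+    // facade as before; path and sizes are assumptions): blockNumber addresses element
+    // offset blockSize * blockNumber, while ...WithOffset takes an element offset directly.
+    //
+    //   long[] block2 = reader.int64().readArrayBlock("/vector", 1024, 2L);  // elements 2048..3071
+    //   long[] tail = reader.int64().readArrayBlockWithOffset("/vector", 100, 4000L);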
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public long[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public long[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public long[][] readMatrixBlockWithOffset(String objectPath,
+            int blockSizeX, int blockSizeY, long offsetX, long offsetY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MDLongArray readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readMDArraySlice(String objectPath, IndexMap boundIndices);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readMDArraySlice(String objectPath, long[] boundIndices);
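+
+    // A slicing sketch (illustrative): both calls fix index 1 of a 3D array to the
+    // value 7 and return a 2D MDLongArray over the two free dimensions.
+    //
+    //   MDLongArray sliceA = reader.int64().readMDArraySlice("/cube", new IndexMap().mapTo(1, 7));
+    //   MDLongArray sliceB = reader.int64().readMDArraySlice("/cube", new long[] { -1, 7, -1 });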
+
+    /**
+     * Reads a block from a multi-dimensional <code>long</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readMDArrayBlock(String objectPath,
+            int[] blockDimensions, long[] blockNumber);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>long</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDLongArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<long[]>> getArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException;
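+
+    // An iteration sketch (illustrative; assumes the getData() and getOffset()
+    // accessors documented for HDF5DataBlock):
+    //
+    //   for (HDF5DataBlock<long[]> block : reader.int64().getArrayNaturalBlocks("/vector"))
+    //   {
+    //       process(block.getData(), block.getOffset());  // process() is a placeholder
+    //   }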
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDLongArray>> getMDArrayNaturalBlocks(String dataSetPath);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5LongWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5LongWriter.java
new file mode 100644
index 0000000..6943c4e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5LongWriter.java
@@ -0,0 +1,586 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * An interface that provides methods for writing <code>long</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
+ * <p>   
+ * <i>Note:</i> If you need to convert from and to unsigned values, use the methods of 
+ * {@link UnsignedIntUtils}.
+ * 
+ * @author Bernd Rinn
+ */
+ // Note: As a trick to keep backward compatibility, this interface extends
+ // IHDF5UnsignedLongWriter instead of IHDF5LongReader, which it logically should extend.
+ // Once we remove IHDF5UnsignedLongWriter, uncomment the following line, remove
+ // all @Override annotations, and we are fine again.
+//public interface IHDF5LongWriter extends IHDF5LongReader
+@SuppressWarnings("deprecation")
+public interface IHDF5LongWriter extends IHDF5UnsignedLongWriter
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>long</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setAttr(String objectPath, String name, long value);
+
+    /**
+     * Set a <code>long[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setArrayAttr(String objectPath, String name, long[] value);
+
+    /**
+     * Set a multi-dimensional <code>long</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMDArrayAttr(String objectPath, String name, MDLongArray value);
+
+    /**
+     * Set a <code>long[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMatrixAttr(String objectPath, String name, long[][] value);
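+
+    // A usage sketch for the attribute setters above, assuming an open IHDF5Writer
+    // named "writer" that exposes this interface (e.g. via int64()); the path and
+    // attribute names are illustrative only.
+    //
+    //   writer.int64().setAttr("/dataset", "count", 42L);
+    //   writer.int64().setArrayAttr("/dataset", "shape", new long[] { 100L, 200L });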
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>long</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    @Override
+    public void write(String objectPath, long value);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    @Override
+    public void writeArray(String objectPath, long[] data);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeArray(String objectPath, long[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    @Override
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. When using extendable data sets (see
+     *          {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *          smaller than this size can be created; data sets may, however, be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>long</code> array to create. When <i>requesting</i> a
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}),
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>.
+     *            When <i>allowing</i> a chunked data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a
+     *            non-extendable data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. When using extendable data sets (see
+     *          {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *          smaller than this size can be created; data sets may, however, be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *          <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>long</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    @Override
+    public void writeArrayBlock(String objectPath, long[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>long</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, long[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    @Override
+    public void writeArrayBlockWithOffset(String objectPath, long[] data,
+            int dataSize, long offset);
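+
+    // A block-wise write sketch (path and sizes are illustrative): create an extendable
+    // array with chunk size 1024, write one full block, then the remainder with
+    // ...WithOffset.
+    //
+    //   long[] chunk = new long[1024];  // one full block of payload
+    //   long[] rest = new long[500];    // trailing partial block
+    //   writer.int64().createArray("/vector", 0L, 1024);
+    //   writer.int64().writeArrayBlock("/vector", chunk, 0L);  // elements 0..1023
+    //   writer.int64().writeArrayBlockWithOffset("/vector", rest, 500, 1024L);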
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(String objectPath, long[][] data);
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMatrix(String objectPath, long[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the long matrix to create.
+     * @param sizeY The size of the y dimension of the long matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the long matrix to create.
+     * @param sizeY The size of the y dimension of the long matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, long[][], long, long)} instead of this
+     * method if the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    @Override
+    public void writeMatrixBlock(String objectPath, long[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, long[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, long[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, long[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, long[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
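+
+    // A matrix sketch (illustrative sizes): create a 10x10-chunked matrix, write one
+    // full block at block number (0, 0), then a 5x7 partial block at offset (10, 10).
+    //
+    //   writer.int64().createMatrix("/matrix", 0L, 0L, 10, 10);
+    //   writer.int64().writeMatrixBlock("/matrix", new long[10][10], 0L, 0L);
+    //   writer.int64().writeMatrixBlockWithOffset("/matrix", new long[5][7], 5, 7, 10L, 10L);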
+
+    /**
+     * Writes out a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDLongArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDLongArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>long</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDLongArray data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>long</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDLongArray data, long[] boundIndices);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the dimensions and will be <var>dimensions</var>.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
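+     * <p>
+     * For example (illustrative; <code>writer</code> is assumed to be an <code>IHDF5Writer</code>):
+     * <pre>
+     * // a 100x100 array stored in chunks of 10x10
+     * writer.int64().createMDArray("/m", new long[] { 100, 100 }, new int[] { 10, 10 });
+     * </pre>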
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>long</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
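+     * <p>
+     * For example (illustrative): with 10x10 blocks, block number <code>{2, 1}</code> writes to
+     * offset <code>{20, 10}</code>.
+     * <pre>
+     * MDLongArray block = new MDLongArray(new int[] { 10, 10 });
+     * writer.int64().writeMDArrayBlock("/m", block, new long[] { 2, 1 });
+     * </pre>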
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     */
+    @Override
+    public void writeMDArrayBlock(String objectPath, MDLongArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>long</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDLongArray data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>long</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDLongArray data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>long</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>long</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ObjectReadOnlyInfoProviderHandler.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ObjectReadOnlyInfoProviderHandler.java
new file mode 100644
index 0000000..c0638bb
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ObjectReadOnlyInfoProviderHandler.java
@@ -0,0 +1,421 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.List;
+
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+
+/**
+ * An interface for getting information on HDF5 objects like links, groups, data sets and data
+ * types.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5ObjectReadOnlyInfoProviderHandler
+{
+    // /////////////////////
+    // Objects & Links
+    // /////////////////////
+
+    /**
+     * Returns the link information for the given <var>objectPath</var>. If <var>objectPath</var>
+     * does not exist, the link information will have a type {@link HDF5ObjectType#NONEXISTENT}.
+     */
+    public HDF5LinkInformation getLinkInformation(final String objectPath);
+
+    /**
+     * Returns the object information for the given <var>objectPath</var>. If <var>objectPath</var>
+     * is a symbolic link, this method will return the type of the object that this link points to
+     * rather than the type of the link. If <var>objectPath</var> does not exist, the object
+     * information will have a type {@link HDF5ObjectType#NONEXISTENT} and the other fields will not
+     * be set.
+     */
+    public HDF5ObjectInformation getObjectInformation(final String objectPath);
+
+    /**
+     * Returns the type of the given <var>objectPath</var>. If <var>followLink</var> is
+     * <code>false</code> and <var>objectPath</var> is a symbolic link, this method will return the
+     * type of the link rather than the type of the object that the link points to.
+     */
+    public HDF5ObjectType getObjectType(final String objectPath, boolean followLink);
+
+    /**
+     * Returns the type of the given <var>objectPath</var>. If <var>objectPath</var> is a symbolic
+     * link, this method will return the type of the object that this link points to rather than the
+     * type of the link, that is, it will follow symbolic links.
+     */
+    public HDF5ObjectType getObjectType(final String objectPath);
+
+    /**
+     * Returns <code>true</code>, if <var>objectPath</var> exists and <code>false</code> otherwise.
+     * If <var>followLink</var> is <code>false</code> and <var>objectPath</var> is a symbolic link,
+     * this method will return <code>true</code> regardless of whether the link target exists or
+     * not.
+     */
+    public boolean exists(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code>, if <var>objectPath</var> exists and <code>false</code> otherwise.
+     * If <var>objectPath</var> is a symbolic link, the method will return <code>true</code> if the
+     * link target exists, that is, this method will follow symbolic links.
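+     * <p>
+     * For example (illustrative; <code>reader</code> is assumed to be an <code>IHDF5Reader</code>):
+     * <pre>
+     * if (reader.object().exists("/group/ds"))
+     * {
+     *     System.out.println("data set found");
+     * }
+     * </pre>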
+     */
+    public boolean exists(final String objectPath);
+
+    /**
+     * Creates and returns an internal (house-keeping) version of <var>objectPath</var>.
+     */
+    public String toHouseKeepingPath(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if <var>objectPath</var> denotes an internal (house-keeping)
+     * object.
+     */
+    public boolean isHouseKeepingObject(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a group and
+     * <code>false</code> otherwise. Note that if <var>followLink</var> is <code>false</code> this
+     * method will return <code>false</code> if <var>objectPath</var> is a symbolic link that points
+     * to a group.
+     */
+    public boolean isGroup(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a group and
+     * <code>false</code> otherwise. Note that if <var>objectPath</var> is a symbolic link, this
+     * method will return <code>true</code> if the link target of the symbolic link is a group, that
+     * is, this method will follow symbolic links.
+     */
+    public boolean isGroup(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data set and
+     * <code>false</code> otherwise. Note that if <var>followLink</var> is <code>false</code> this
+     * method will return <code>false</code> if <var>objectPath</var> is a symbolic link that points
+     * to a data set.
+     */
+    public boolean isDataSet(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data set and
+     * <code>false</code> otherwise. Note that if <var>objectPath</var> is a symbolic link, this
+     * method will return <code>true</code> if the link target of the symbolic link is a data set,
+     * that is, this method will follow symbolic links.
+     */
+    public boolean isDataSet(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data type and
+     * <code>false</code> otherwise. Note that if <var>followLink</var> is <code>false</code> this
+     * method will return <code>false</code> if <var>objectPath</var> is a symbolic link that points
+     * to a data type.
+     */
+    public boolean isDataType(final String objectPath, boolean followLink);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a data type and
+     * <code>false</code> otherwise. Note that if <var>objectPath</var> is a symbolic link, this
+     * method will return <code>true</code> if the link target of the symbolic link is a data type,
+     * that is, this method will follow symbolic links.
+     */
+    public boolean isDataType(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a soft link and
+     * <code>false</code> otherwise.
+     */
+    public boolean isSoftLink(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents an external link
+     * and <code>false</code> otherwise.
+     */
+    public boolean isExternalLink(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents either a soft
+     * link or an external link and <code>false</code> otherwise.
+     */
+    public boolean isSymbolicLink(final String objectPath);
+
+    /**
+     * Returns the target of the symbolic link that <var>objectPath</var> points to, or
+     * <code>null</code>, if <var>objectPath</var> is not a symbolic link.
+     */
+    public String tryGetSymbolicLinkTarget(final String objectPath);
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if the <var>objectPath</var> has an attribute with name
+     * <var>attributeName</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return <code>true</code>, if the attribute exists for the object.
+     */
+    public boolean hasAttribute(final String objectPath, final String attributeName);
+
+    /**
+     * Returns the names of the attributes of the given <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the object (data set or group) to
+     *            return the attributes for.
+     */
+    public List<String> getAttributeNames(final String objectPath);
+
+    /**
+     * Returns the names of all attributes of the given <var>objectPath</var>.
+     * <p>
+     * This may include attributes that are used internally by the library and are not supposed to
+     * be changed by application programmers.
+     * 
+     * @param objectPath The name (including path information) of the object (data set or group) to
+     *            return the attributes for.
+     */
+    public List<String> getAllAttributeNames(final String objectPath);
+
+    /**
+     * Returns the information about an attribute as a {@link HDF5DataTypeInformation} object.
+     * 
+     * @param objectPath The name (including path information) of the object that has the attribute
+     *            to return information about.
+     * @param attributeName The name of the attribute to get information about.
+     */
+    public HDF5DataTypeInformation getAttributeInformation(final String objectPath,
+            final String attributeName);
+
+    /**
+     * Returns the information about an attribute as a {@link HDF5DataTypeInformation} object.
+     * 
+     * @param objectPath The name (including path information) of the object that has the attribute
+     *            to return information about.
+     * @param attributeName The name of the attribute to get information about.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     */
+    public HDF5DataTypeInformation getAttributeInformation(final String objectPath,
+            final String attributeName, final DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataSetInformation} object. It is a
+     * failure condition if the <var>objectPath</var> does not exist or does not identify a data
+     * set.
+     * 
+     * @param objectPath The name (including path information) of the data set to return information
+     *            about.
+     */
+    public HDF5DataSetInformation getDataSetInformation(final String objectPath);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataSetInformation} object. It is a
+     * failure condition if the <var>objectPath</var> does not exist or does not identify a data
+     * set.
+     * 
+     * @param objectPath The name (including path information) of the data set to return information
+     *            about.
+     * @param dataTypeInfoOptions The options on which information to get about the member data
+     *            types.
+     */
+    public HDF5DataSetInformation getDataSetInformation(final String objectPath,
+            final DataTypeInfoOptions dataTypeInfoOptions);
+
+    /**
+     * Returns the total size (in bytes) of <var>objectPath</var>. It is a failure condition if the
+     * <var>objectPath</var> does not exist or does not identify a data set. This method follows
+     * symbolic links.
+     */
+    public long getSize(final String objectPath);
+
+    /**
+     * Returns the total number of elements of <var>objectPath</var>. It is a failure condition if
+     * the <var>objectPath</var> does not exist or does not identify a data set. This method follows
+     * symbolic links.
+     */
+    public long getNumberOfElements(final String objectPath);
+
+    /**
+     * Returns the size of one element of <var>objectPath</var>. It is a failure condition if the
+     * <var>objectPath</var> does not exist or does not identify a data set. This method follows
+     * symbolic links.
+     */
+    public int getElementSize(final String objectPath);
+
+    /**
+     * Returns the rank of the space of <var>objectPath</var> (0 if this is a scalar space). It is a
+     * failure condition if the <var>objectPath</var> does not exist or does not identify a data
+     * set. This method follows symbolic links.
+     */
+    public int getSpaceRank(final String objectPath);
+
+    /**
+     * Returns the dimensions of the space of <var>objectPath</var> (empty if this is a scalar
+     * space). It is a failure condition if the <var>objectPath</var> does not exist or does not
+     * identify a data set. This method follows symbolic links.
+     */
+    public long[] getSpaceDimensions(final String objectPath);
+
+    /**
+     * Returns the rank of the array of <var>objectPath</var> (0 if this is not an array type). It is a
+     * failure condition if the <var>objectPath</var> does not exist or does not identify a data
+     * set. This method follows symbolic links.
+     */
+    public int getArrayRank(final String objectPath);
+
+    /**
+     * Returns the dimensions of the array of <var>objectPath</var> (empty if this is not an
+     * array type). It is a
+     * failure condition if the <var>objectPath</var> does not exist or does not identify a data
+     * set. This method follows symbolic links.
+     */
+    public int[] getArrayDimensions(final String objectPath);
+
+    /**
+     * Returns the rank of the data set <var>objectPath</var>. This combines the space rank and
+     * the array rank into one rank. It is a failure condition if the <var>objectPath</var> does not
+     * exist or does not identify a data set. This method follows symbolic links.
+     */
+    public int getRank(final String objectPath);
+
+    /**
+     * Returns the dimensions of <var>objectPath</var>. This combines the space dimensions and the
+     * array dimensions into one set of dimensions. It is a failure condition if the <var>objectPath</var> does
+     * not exist or does not identify a data set. This method follows symbolic links.
+     */
+    public long[] getDimensions(final String objectPath);
+
+    // /////////////////////
+    // Copies
+    // /////////////////////
+
+    /**
+     * Copies the <var>sourceObject</var> to the <var>destinationObject</var> of the HDF5 file
+     * represented by the <var>destinationWriter</var>. If <var>destinationObject</var> ends with
+     * "/", it will be considered a group and the name of <var>sourceObject</var> will be appended.
+     */
+    public void copy(String sourceObject, IHDF5Writer destinationWriter, String destinationObject);
+
+    /**
+     * Copies the <var>sourceObject</var> to the root group of the HDF5 file represented by the
+     * <var>destinationWriter</var>.
+     */
+    public void copy(String sourceObject, IHDF5Writer destinationWriter);
+
+    /**
+     * Copies all objects of the file represented by this reader to the root group of the HDF5 file
+     * represented by the <var>destinationWriter</var>.
+     */
+    public void copyAll(IHDF5Writer destinationWriter);
+
+    // /////////////////////
+    // Groups
+    // /////////////////////
+
+    /**
+     * Returns the members of <var>groupPath</var>. The order is <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    public List<String> getGroupMembers(final String groupPath);
+
+    /**
+     * Returns all members of <var>groupPath</var>, including internal groups that may be used by
+     * the library to do house-keeping. The order is <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    public List<String> getAllGroupMembers(final String groupPath);
+
+    /**
+     * Returns the paths of the members of <var>groupPath</var> (including the parent). The order is
+     * <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the member paths for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    public List<String> getGroupMemberPaths(final String groupPath);
+
+    /**
+     * Returns the link information about the members of <var>groupPath</var>. The order is
+     * <i>not</i> well defined.
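+     * <p>
+     * For example (illustrative; assumes the <code>getPath()</code> and <code>getType()</code>
+     * accessors of {@link HDF5LinkInformation}):
+     * <pre>
+     * for (HDF5LinkInformation info : reader.object().getGroupMemberInformation("/", true))
+     * {
+     *     System.out.println(info.getPath() + ": " + info.getType());
+     * }
+     * </pre>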
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @param readLinkTargets If <code>true</code>, for symbolic links the link targets will be
+     *            available via {@link HDF5LinkInformation#tryGetSymbolicLinkTarget()}.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    public List<HDF5LinkInformation> getGroupMemberInformation(final String groupPath,
+            boolean readLinkTargets);
+
+    /**
+     * Returns the link information about all members of <var>groupPath</var>. The order is
+     * <i>not</i> well defined.
+     * <p>
+     * This may include attributes that are used internally by the library and are not supposed to
+     * be changed by application programmers.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @param readLinkTargets If <code>true</code>, the link targets will be read for symbolic
+     *            links.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    public List<HDF5LinkInformation> getAllGroupMemberInformation(final String groupPath,
+            boolean readLinkTargets);
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Returns the data type variant of <var>objectPath</var>, or <code>null</code>, if no type
+     * variant is defined for this <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data type variant or <code>null</code>.
+     */
+    public HDF5DataTypeVariant tryGetTypeVariant(final String objectPath);
+
+    /**
+     * Returns the data type variant of <var>attributeName</var> of object <var>objectPath</var>, or
+     * <code>null</code>, if no type variant is defined for this <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data type variant or <code>null</code>.
+     */
+    public HDF5DataTypeVariant tryGetTypeVariant(final String objectPath, String attributeName);
+
+    /**
+     * Returns the path of the data type of the data set <var>objectPath</var>, or <code>null</code>
+     * , if this data set is not of a named data type.
+     */
+    public String tryGetDataTypePath(final String objectPath);
+
+    /**
+     * Returns the path of the data <var>type</var>, or <code>null</code>, if <var>type</var> is not
+     * a named data type.
+     */
+    public String tryGetDataTypePath(HDF5DataType type);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ObjectReadWriteInfoProviderHandler.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ObjectReadWriteInfoProviderHandler.java
new file mode 100644
index 0000000..c861927
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ObjectReadWriteInfoProviderHandler.java
@@ -0,0 +1,237 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SymbolTableException;
+
+/**
+ * An interface for getting information on and handling HDF5 objects like links, groups, data sets
+ * and data types.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5ObjectReadWriteInfoProviderHandler extends IHDF5ObjectReadOnlyInfoProviderHandler
+{
+    // /////////////////////
+    // Links
+    // /////////////////////
+
+    /**
+     * Creates a hard link.
+     * 
+     * @param currentPath The name of the data set (including path information) to create a link to.
+     * @param newPath The name (including path information) of the link to create.
+     */
+    public void createHardLink(String currentPath, String newPath);
+
+    /**
+     * Creates a soft link.
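+     * <p>
+     * For example (illustrative; <code>writer</code> is assumed to be an <code>IHDF5Writer</code>):
+     * <pre>
+     * writer.object().createSoftLink("/data/run1", "/data/latest");
+     * </pre>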
+     * 
+     * @param targetPath The name of the data set (including path information) to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     */
+    public void createSoftLink(String targetPath, String linkPath);
+
+    /**
+     * Creates or updates a soft link.
+     * <p>
+     * <em>Note: This method will never overwrite a data set, but only a symbolic link.</em>
+     * 
+     * @param targetPath The name of the data set (including path information) to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     */
+    public void createOrUpdateSoftLink(String targetPath, String linkPath);
+
+    /**
+     * Creates an external link, that is, a link to a data set in another HDF5 file, the
+     * <em>target</em>.
+     * <p>
+     * <em>Note: This method is only allowed when the {@link IHDF5WriterConfigurator} was not 
+     * configured to enforce strict HDF5 1.6 compatibility.</em>
+     * 
+     * @param targetFileName The name of the file where the data set resides that should be linked.
+     * @param targetPath The name of the data set (including path information) in the
+     *            <var>targetFileName</var> to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     * @throws IllegalStateException If the {@link IHDF5WriterConfigurator} was configured to
+     *             enforce strict HDF5 1.6 compatibility.
+     */
+    public void createExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException;
+
+    /**
+     * Creates or updates an external link, that is, a link to a data set in another HDF5 file,
+     * the <em>target</em>.
+     * <p>
+     * <em>Note: This method will never overwrite a data set, but only a symbolic link.</em>
+     * <p>
+     * <em>Note: This method is only allowed when the {@link IHDF5WriterConfigurator} was not 
+     * configured to enforce strict HDF5 1.6 compatibility.</em>
+     * 
+     * @param targetFileName The name of the file where the data set resides that should be linked.
+     * @param targetPath The name of the data set (including path information) in the
+     *            <var>targetFileName</var> to create a link to.
+     * @param linkPath The name (including path information) of the link to create.
+     * @throws IllegalStateException If the {@link IHDF5WriterConfigurator} was configured to
+     *             enforce strict HDF5 1.6 compatibility.
+     */
+    public void createOrUpdateExternalLink(String targetFileName, String targetPath, String linkPath)
+            throws IllegalStateException;
+
+    /**
+     * Removes an object from the file. If there is more than one link to the object, only the
+     * specified link will be removed.
+     */
+    public void delete(String objectPath);
+
+    /**
+     * Moves or renames a link in the file atomically.
+     * 
+     * @throws HDF5SymbolTableException If <var>oldLinkPath</var> does not exist or if
+     *             <var>newLinkPath</var> already exists.
+     */
+    public void move(String oldLinkPath, String newLinkPath) throws HDF5SymbolTableException;
+
+    // /////////////////////
+    // Group
+    // /////////////////////
+
+    /**
+     * Creates a group with path <var>groupPath</var> in the HDF5 file.
+     * <p>
+     * All intermediate groups will be created as well, if they do not already exist.
+     * 
+     * @param groupPath The path of the group to create.
+     */
+    public void createGroup(final String groupPath);
+
+    /**
+     * Creates a group with path <var>groupPath</var> in the HDF5 file, giving the library a hint
+     * about the size (<var>sizeHint</var>). If you have this information in advance, it is more
+     * efficient to tell the library than to let it figure this out itself, but the hint must not
+     * be misunderstood as a limit.
+     * <p>
+     * All intermediate groups will be created as well, if they do not already exist.
+     * <p>
+     * <i>Note: This method creates an "old-style group", that is the type of group of HDF5 1.6 and
+     * earlier.</i>
+     * 
+     * @param groupPath The path of the group to create.
+     * @param sizeHint The estimated size of all group entries (in bytes).
+     */
+    public void createGroup(final String groupPath, final int sizeHint);
+
+    /**
+     * Creates a group with path <var>groupPath</var> in the HDF5 file, giving the library hints
+     * about when to switch between compact and dense group storage. Setting appropriate values may
+     * improve performance.
+     * <p>
+     * All intermediate groups will be created as well, if they do not already exist.
+     * <p>
+     * <i>Note: This method creates a "new-style group", that is the type of group of HDF5 1.8 and
+     * above. Thus it will fail, if the writer is configured to enforce HDF5 1.6 compatibility.</i>
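+     * <p>
+     * For example (illustrative):
+     * <pre>
+     * // switch to dense storage above 50 entries, back to compact below 25
+     * writer.object().createGroup("/results", 50, 25);
+     * </pre>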
+     * 
+     * @param groupPath The path of the group to create.
+     * @param maxCompact When the group grows to more than this number of entries, the library will
+     *            convert the group style from compact to dense.
+     * @param minDense When the group shrinks below this number of entries, the library will convert
+     *            the group style from dense to compact.
+     */
+    public void createGroup(final String groupPath, final int maxCompact, final int minDense);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Sets the data set size of a one-dimensional data set to <var>newSize</var>. Note that this
+     * method can only be applied to extendable data sets.
+     * 
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not extendable.
+     */
+    public void setDataSetSize(final String objectPath, final long newSize);
+
+    /**
+     * Sets the data set size of a multi-dimensional data set to <var>newDimensions</var>. Note that
+     * this method can only be applied to extendable data sets.
+     * 
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not extendable.
+     */
+    public void setDataSetDimensions(final String objectPath, final long[] newDimensions);
+
+    // /////////////////////
+    // Types
+    // /////////////////////
+
+    /**
+     * Sets a <var>typeVariant</var> of object <var>objectPath</var>.
+     * 
+     * @param objectPath The name of the object to add the type variant to.
+     * @param typeVariant The type variant to add.
+     */
+    public void setTypeVariant(final String objectPath, final HDF5DataTypeVariant typeVariant);
+
+    /**
+     * Sets a <var>typeVariant</var> of attribute <var>attributeName</var> of object
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name of the object.
+     * @param attributeName The name of attribute to add the type variant to.
+     * @param typeVariant The type variant to add.
+     */
+    public void setTypeVariant(final String objectPath, final String attributeName,
+            final HDF5DataTypeVariant typeVariant);
+
+    /**
+     * Deletes the <var>typeVariant</var> from <var>objectPath</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to delete the type variant from.
+     */
+    public void deleteTypeVariant(final String objectPath);
+
+    /**
+     * Deletes the <var>typeVariant</var> from <var>attributeName</var> of <var>objectPath</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object.
+     * @param attributeName The name of the attribute to delete the type variant from.
+     */
+    public void deleteTypeVariant(final String objectPath, final String attributeName);
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Deletes an attribute.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of
+     * the <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to delete the attribute from.
+     * @param name The name of the attribute to delete.
+     */
+    public void deleteAttribute(final String objectPath, final String name);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5OpaqueReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5OpaqueReader.java
new file mode 100644
index 0000000..acd69a8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5OpaqueReader.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface that provides methods for reading any data sets as byte arrays (as 'opaque data',
+ * just like ordinary file systems treat files). This is particularly useful for opaque data types,
+ * which are "black boxes" to the HDF5 library.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5OpaqueReader
+{
+
+    // /////////////////////////////
+    // Opaque tags and types
+    // /////////////////////////////
+
+    /**
+     * Returns the tag of the opaque data type associated with <var>objectPath</var>, or
+     * <code>null</code>, if <var>objectPath</var> is not of an opaque data type (i.e. if
+     * <code>reader.getDataSetInformation(objectPath).getTypeInformation().getDataClass() != HDF5DataClass.OPAQUE</code>).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The tag of the opaque data type, or <code>null</code>.
+     */
+    public String tryGetOpaqueTag(final String objectPath);
+
+    /**
+     * Returns the opaque data type or <code>null</code>, if <var>objectPath</var> is not of such a
+     * data type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The opaque data type, or <code>null</code>.
+     */
+    public HDF5OpaqueType tryGetOpaqueType(final String objectPath);
+
+    // /////////////////////////////
+    // Reading as byte array
+    // /////////////////////////////
+
+    /**
+     * Gets the byte array values of an attribute <var>attributeName</var> of object
+     * <var>objectPath</var>. The bytes read will be in the native byte-order of the machine but
+     * will otherwise be unchanged.
+     */
+    public byte[] getArrayAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads the data set <var>objectPath</var> as byte array. The bytes read will be in the native
+     * byte-order of the machine and will be ordered 'row-first' in the case of multi-dimensional
+     * data sets, but will otherwise be unchanged.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public byte[] readArray(final String objectPath);
+
+    /**
+     * Reads a block from data set <var>objectPath</var> as byte array. The bytes read will be in
+     * the native byte-order of the machine, but will otherwise be unchanged.
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
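+     * <p>
+     * For example (illustrative): with <var>blockSize</var> 1024, block number 2 reads
+     * elements 2048..3071.
+     * <pre>
+     * byte[] block = reader.opaque().readArrayBlock("/blob", 1024, 2);
+     * </pre>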
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size in numbers of elements (this will be the length of the
+     *            <code>byte[]</code> returned, divided by the size of one element).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set is not of rank 1 or is a String.
+     */
+    public byte[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a block from data set <var>objectPath</var> as byte array. The bytes read will be in
+     * the native byte-order of the machine, but will otherwise be unchanged.
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size in numbers of elements (this will be the length of the
+     *            <code>byte[]</code> returned, divided by the size of one element).
+     * @param offset The offset of the block to read as number of elements (starting with 0).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the data set is not of rank 1 or is a String.
+     */
+    public byte[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset) throws HDF5JavaException;
+
+    /**
+     * Reads a block from data set <var>objectPath</var> as byte array into <var>buffer</var>. The
+     * bytes read will be in the native byte-order of the machine, but will otherwise be unchanged.
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param buffer The buffer to read the values into.
+     * @param blockSize The block size in numbers of elements (the maximum number of elements
+     *            that will be read into <var>buffer</var>).
+     * @param offset The offset of the block in the data set as number of elements (zero-based).
+     * @param memoryOffset The offset of the block in <var>buffer</var> as number of elements
+     *            (zero-based).
+     * @return The effective block size.
+     * @throws HDF5JavaException If the data set is not of rank 1 or is a String.
+     */
+    public int readArrayToBlockWithOffset(final String objectPath, final byte[] buffer,
+            final int blockSize, final long offset, final int memoryOffset)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over. The bytes read
+     * will be in the native byte-order of the machine, but will otherwise be unchanged.
+     * <em>Must not be called for data sets of rank other than 1 and must not be called on Strings!</em>
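+     * <p>
+     * For example (illustrative; assumes the <code>getData()</code> and <code>getOffset()</code>
+     * accessors of {@link HDF5DataBlock}):
+     * <pre>
+     * for (HDF5DataBlock&lt;byte[]&gt; block : reader.opaque().getArrayNaturalBlocks("/blob"))
+     * {
+     *     System.out.println(block.getOffset() + ": " + block.getData().length);
+     * }
+     * </pre>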
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<byte[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException;
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5OpaqueWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5OpaqueWriter.java
new file mode 100644
index 0000000..b664af4
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5OpaqueWriter.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An interface that provides methods for writing opaque values to HDF5 files. Opaque values are
+ * represented as byte arrays; however, contrary to the methods in {@link IHDF5ByteWriter}, there is
+ * no notion of the interpretation of these values. The methods in this writer can be used to store
+ * data sets which are a "black box". Note that there are no dedicated methods for reading opaque
+ * types. Use the methods in {@link IHDF5OpaqueReader} instead which allow you to read any data set
+ * as a byte array.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5OpaqueWriter extends IHDF5OpaqueReader
+{
+
+    /**
+     * Writes out an opaque data type described by <var>tag</var> and defined by a <code>byte</code>
+     * array (of rank 1).
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArray(String)} instead.
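+     * <p>
+     * For example (illustrative; the tag string is arbitrary and hypothetical):
+     * <pre>
+     * byte[] pixels = new byte[] { 1, 2, 3, 4 };
+     * writer.opaque().writeArray("/raw/frame0", "my.pixel.format", pixels);
+     * </pre>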
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(final String objectPath, final String tag, final byte[] data);
+
+    /**
+     * Writes out an opaque data type described by <var>tag</var> and defined by a <code>byte</code>
+     * array (of rank 1).
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArray(String)} instead.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param tag The tag of the data set.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(final String objectPath, final String tag, final byte[] data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create.
+     * @param blockSize The size of one block (for block-wise I/O).
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     */
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final long size, final int blockSize);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     */
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final int size);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create.
+     * @param blockSize The size of one block (for block-wise I/O).
+     * @param features The storage features of the data set.
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     */
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final long size, final int blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates an opaque data set that will be represented as a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     * @return The {@link HDF5OpaqueType} that can be used in methods
+     *         {@link #writeArrayBlock(String, HDF5OpaqueType, byte[], long)} and
+     *         {@link #writeArrayBlockWithOffset(String, HDF5OpaqueType, byte[], int, long)}
+     *         to represent this opaque type.
+     */
+    public HDF5OpaqueType createArray(final String objectPath, final String tag,
+            final int size, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of an opaque data type represented by a <code>byte</code> array (of rank
+     * 1). The data set needs to have been created by
+     * {@link #createArray(String, String, long, int, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, String, long, int, HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArrayBlock(String, int, long)} instead.
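+     * <p>
+     * A sketch of block-wise writing (illustrative; names are hypothetical):
+     * <pre>
+     * byte[] firstKiB = new byte[1024];
+     * byte[] secondKiB = new byte[1024];
+     * HDF5OpaqueType type = writer.opaque().createArray("/raw", "my.tag", 2048L, 1024);
+     * writer.opaque().writeArrayBlock("/raw", type, firstKiB, 0L);
+     * writer.opaque().writeArrayBlock("/raw", type, secondKiB, 1L);
+     * </pre>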
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(final String objectPath, final HDF5OpaqueType dataType,
+            final byte[] data, final long blockNumber);
+
+    /**
+     * Writes out a block of an opaque data type represented by a <code>byte</code> array (of rank
+     * 1). The data set needs to have been created by
+     * {@link #createArray(String, String, long, int, HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of
+     * {@link #writeArrayBlock(String, HDF5OpaqueType, byte[], long)} if the total size of
+     * the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, String, long, int, HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * <p>
+     * Note that there is no dedicated method for reading opaque types. Use the method
+     * {@link IHDF5OpaqueReader#readArrayBlockWithOffset(String, int, long)} instead.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>)
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(final String objectPath,
+            final HDF5OpaqueType dataType, final byte[] data, final int dataSize, final long offset);
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5Reader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5Reader.java
new file mode 100644
index 0000000..001b16e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5Reader.java
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An interface for reading HDF5 files (HDF5 1.8.x and older).
+ * <p>
+ * The interface focuses on ease of use instead of completeness. As a consequence, not all features
+ * of HDF5 are supported by this class; however, it covers a large subset. In particular all
+ * information written by {@link IHDF5Writer} can be read by this class.
+ * <p>
+ * The functionality is made available in two ways:
+ * <ol>
+ * <li>{@link IHDF5SimpleReader} contains the most important methods in one interface. If you are
+ * new to the library, this is a good starting point, see the example code below.</li>
+ * <li>The hierarchical ("quasi-fluent") API provides the full functionality. It is designed along
+ * the data types supported by JHDF5.
+ * <ul>
+ * <li>{@link #file()}: File-level information and operations, has e.g. the
+ * {@link IHDF5FileLevelReadOnlyHandler#close()} method.</li>
+ * <li>{@link #object()}: Object-level information, where "objects" can be data sets, links, groups
+ * or data types, following the concept of an HDF5 object. Here you can find for example the method
+ * {@link IHDF5ObjectReadOnlyInfoProviderHandler#getGroupMemberInformation(String, boolean)} which
+ * gives you information on the members of a group and the method
+ * {@link IHDF5ObjectReadOnlyInfoProviderHandler#tryGetSymbolicLinkTarget(String)} for resolving a
+ * symbolic link.</li>
+ * <li>{@link #bool()}: Reader methods for boolean data sets, including bit fields.</li>
+ * <li>{@link #int8()} / {@link #int16()} / {@link #int32()} / {@link #int64()}:
+ * Reader methods for integer data sets, where the number as part of the method name denotes the
+ * size of the integer type. The methods will always read signed integers; if you need unsigned
+ * integers, you need to convert them with one of the methods in {@link UnsignedIntUtils}.</li>
+ * <li>{@link #float32()} / {@link #float64()}: Reader methods for float data sets, where the number
+ * as part of the method name denotes the size of the float type.</li>
+ * <li>{@link #time()} / {@link #duration()}: Reader methods for time stamp (or date) and for time
+ * duration data sets.</li>
+ * <li>{@link #string()}: Reader methods for string data sets.</li>
+ * <li>{@link #enumeration()}: Reader methods for enumeration data sets.</li>
+ * <li>{@link #compound()}: Reader methods for compound data sets.</li>
+ * <li>{@link #opaque()}: Reader methods for data sets that are "black boxes" to HDF5, which are
+ * called "opaque data sets" in HDF5 jargon. Here you can also find methods for reading arbitrary
+ * data sets as byte arrays.</li>
+ * <li>{@link #reference()}: Reader methods for HDF5 object references. Note that object references,
+ * though similar to hard links and symbolic links at first glance, are quite different in
+ * HDF5.</li>
+ * </ul>
+ * </li>
+ * </ol>
+ * <p>
+ * Usage example for {@link IHDF5SimpleReader}:
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(new File("test.h5"));
+ * float[] f = reader.readFloatArray("/some/path/dataset");
+ * String s = reader.getStringAttribute("/some/path/dataset", "some key");
+ * reader.close();
+ * </pre>
+ * 
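+ * A sketch of the same kind of read using the hierarchical API (method names chosen by
+ * analogy with the numeric readers listed above; the path is illustrative):
+ * 
+ * <pre>
+ * IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(new File("test.h5"));
+ * float[] f = reader.float32().readArray("/some/path/dataset");
+ * reader.file().close();
+ * </pre>
+ * 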
+ * @author Bernd Rinn
+ */
+@SuppressWarnings("deprecation")
+public interface IHDF5Reader extends IHDF5SimpleReader, IHDF5LegacyReader
+{
+
+    // /////////////////////
+    // File
+    // /////////////////////
+
+    /**
+     * Returns the handler for file-level information and status.
+     */
+    public IHDF5FileLevelReadOnlyHandler file();
+
+    // /////////////////////////////////
+    // Objects, links, groups and types
+    // /////////////////////////////////
+
+    /**
+     * Returns an info provider for HDF5 objects like links, groups, data sets and data types.
+     */
+    public IHDF5ObjectReadOnlyInfoProviderHandler object();
+
+    // /////////////////////
+    // Opaque
+    // /////////////////////
+
+    /**
+     * Returns the full reader for reading data sets and attributes as byte arrays ('opaque') and
+     * obtaining opaque types.
+     */
+    public IHDF5OpaqueReader opaque();
+
+    // /////////////////////
+    // Boolean
+    // /////////////////////
+
+    /**
+     * Returns the full reader for boolean values.
+     */
+    public IHDF5BooleanReader bool();
+
+    // /////////////////////
+    // Bytes
+    // /////////////////////
+
+    /**
+     * Returns the full reader for byte / int8.
+     */
+    public IHDF5ByteReader int8();
+
+    /**
+     * Returns the full reader for unsigned byte / uint8.
+     */
+    public IHDF5ByteReader uint8();
+
+    // /////////////////////
+    // Short
+    // /////////////////////
+
+    /**
+     * Returns the full reader for short / int16.
+     */
+    public IHDF5ShortReader int16();
+
+    /**
+     * Returns the full reader for unsigned short / uint16.
+     */
+    public IHDF5ShortReader uint16();
+
+    // /////////////////////
+    // Int
+    // /////////////////////
+
+    /**
+     * Returns the full reader for int / int32.
+     */
+    public IHDF5IntReader int32();
+
+    /**
+     * Returns the full reader for unsigned int / uint32.
+     */
+    public IHDF5IntReader uint32();
+
+    // /////////////////////
+    // Long
+    // /////////////////////
+
+    /**
+     * Returns the full reader for long / int64.
+     */
+    public IHDF5LongReader int64();
+
+    /**
+     * Returns the full reader for unsigned long / uint64.
+     */
+    public IHDF5LongReader uint64();
+
+    // /////////////////////
+    // Float
+    // /////////////////////
+
+    /**
+     * Returns the full reader for float / float32.
+     */
+    public IHDF5FloatReader float32();
+
+    // /////////////////////
+    // Double
+    // /////////////////////
+
+    /**
+     * Returns the full reader for double / float64.
+     */
+    public IHDF5DoubleReader float64();
+
+    // /////////////////////
+    // Enums
+    // /////////////////////
+
+    /**
+     * Returns the full reader for enumerations.
+     */
+    public IHDF5EnumReader enumeration();
+
+    // /////////////////////
+    // Compounds
+    // /////////////////////
+
+    /**
+     * Returns the full reader for compounds.
+     */
+    public IHDF5CompoundReader compound();
+
+    // /////////////////////
+    // Strings
+    // /////////////////////
+
+    /**
+     * Returns the full reader for strings.
+     */
+    public IHDF5StringReader string();
+
+    // /////////////////////
+    // Date & Time
+    // /////////////////////
+
+    /**
+     * Returns the full reader for dates and times.
+     */
+    public IHDF5DateTimeReader time();
+
+    /**
+     * Returns the full reader for time durations.
+     */
+    public IHDF5TimeDurationReader duration();
+
+    // /////////////////////
+    // Object references
+    // /////////////////////
+
+    /**
+     * Returns the full reader for object references.
+     */
+    public IHDF5ReferenceReader reference();
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ReaderConfigurator.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ReaderConfigurator.java
new file mode 100644
index 0000000..423c74f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ReaderConfigurator.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * A configurator for a {@link IHDF5Reader}.
+ * <p>
+ * If you want the reader to perform numeric conversions, call {@link #performNumericConversions()}
+ * before calling {@link #reader()}.
+ * 
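+ * A minimal configuration sketch (how the configurator is obtained from the factory is an
+ * assumption here; the file name is illustrative):
+ * 
+ * <pre>
+ * IHDF5ReaderConfigurator config = HDF5FactoryProvider.get().configureForReading(new File("test.h5"));
+ * if (config.platformSupportsNumericConversions())
+ * {
+ *     config.performNumericConversions();
+ * }
+ * IHDF5Reader reader = config.reader();
+ * </pre>
+ * 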
+ * @author Bernd Rinn
+ */
+public interface IHDF5ReaderConfigurator
+{
+
+    /**
+     * Returns <code>true</code> if this platform supports numeric conversions.
+     */
+    public boolean platformSupportsNumericConversions();
+
+    /**
+     * Will try to perform numeric conversions where appropriate if supported by the platform.
+     * <p>
+     * <strong>Numeric conversions can be platform dependent and are not available on all platforms.
+     * Be advised not to rely on numeric conversions if you can help it!</strong>
+     */
+    public IHDF5ReaderConfigurator performNumericConversions();
+
+    /**
+     * Sets UTF8 character encoding for all paths and all strings in this file. (The default is
+     * ASCII.)
+     * 
+     * @deprecated Should not be used for the reader, as the reader will figure the encoding out by looking at the HDF5 file.
+     */
+    @Deprecated
+    public IHDF5ReaderConfigurator useUTF8CharacterEncoding();
+
+    /**
+     * Switches off automatic dereferencing of unresolved references. Use this when you need to
+     * access file names that start with \0. The downside of switching off automatic dereferencing
+     * is that you can't provide references as obtained by
+     * {@link IHDF5ReferenceReader#read(String, boolean)} with
+     * <code>resolveName=false</code> in places where a dataset path is required.
+     * <br>
+     * <i>Note: automatic dereferencing is switched on by default.</i>
+     */
+    public IHDF5ReaderConfigurator noAutoDereference();
+
+    /**
+     * Returns an {@link IHDF5Reader} based on this configuration.
+     */
+    public IHDF5Reader reader();
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ReferenceReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ReferenceReader.java
new file mode 100644
index 0000000..9effe5a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ReferenceReader.java
@@ -0,0 +1,419 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+
+/**
+ * An interface for reading references in HDF5 files.
+ * 
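+ * <p>
+ * A minimal read sketch, assuming <code>reader</code> is an {@link IHDF5Reader} and the
+ * paths are illustrative:
+ * 
+ * <pre>
+ * // Read a reference and resolve it to the path of the referenced object.
+ * String path = reader.reference().read("/refs/myReference");
+ * // Cheaper if only the raw reference is needed; resolve it later on demand.
+ * String ref = reader.reference().read("/refs/myReference", false);
+ * String resolved = reader.reference().resolvePath(ref);
+ * </pre>
+ * 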
+ * @see IHDF5ReferenceWriter
+ * @author Bernd Rinn
+ */
+public interface IHDF5ReferenceReader
+{
+    // //////////////////////////////
+    // Specific to object references
+    // //////////////////////////////
+
+    /**
+     * Resolves the path of a reference which has been read without name resolution.
+     * 
+     * @param reference Reference encoded as string.
+     * @return The path in the HDF5 file.
+     * @see #readArray(String, boolean)
+     * @throws HDF5JavaException if <var>reference</var> is not a string-encoded reference.
+     */
+    public String resolvePath(final String reference) throws HDF5JavaException;
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads an object reference attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>, resolving the name of the object. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     */
+    public String getAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads an object reference attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param resolveName If <code>true</code>, resolves the name of the object referenced,
+     *            otherwise returns the reference itself.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     */
+    public String getAttr(final String objectPath, final String attributeName,
+            final boolean resolveName);
+
+    /**
+     * Reads a 1D object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     */
+    public String[] getArrayAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a 1D object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     */
+    public String[] getArrayAttr(final String objectPath, final String attributeName,
+            final boolean resolveName);
+
+    /**
+     * Reads an object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     */
+    public MDArray<String> getMDArrayAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads an object reference array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The paths of the objects that the references refer to. Each string may be empty, if
+     *         the corresponding object reference refers to an unnamed object.
+     */
+    public MDArray<String> getMDArrayAttr(final String objectPath, final String attributeName,
+            boolean resolveName);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads an object reference from the object <var>objectPath</var>, resolving the name of the
+     * object. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     */
+    public String read(final String objectPath);
+
+    /**
+     * Reads an object reference from the object <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param resolveName If <code>true</code>, resolves the name of the object referenced,
+     *            otherwise returns the reference itself.
+     * @return The path of the object that the reference refers to, or an empty string, if the
+     *         object reference refers to an unnamed object.
+     */
+    public String read(final String objectPath, final boolean resolveName);
+
+    /**
+     * Reads an array of object references from the object <var>objectPath</var>, resolving the
+     * names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The array of the paths of objects that the references refer to. Each string may be
+     *         empty, if the corresponding object reference refers to an unnamed object.
+     */
+    public String[] readArray(final String objectPath);
+
+    /**
+     * Reads an array of object references from the object <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The array of the paths of objects that the references refer to. Each string may be
+     *         empty, if the corresponding object reference refers to an unnamed object.
+     */
+    public String[] readArray(final String objectPath, boolean resolveName);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The referenced data set paths read from the data set. The length will be min(size -
+     *         blockSize*blockNumber, blockSize).
+     */
+    public String[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths read from the data set. The length will be min(size -
+     *         blockSize*blockNumber, blockSize).
+     */
+    public String[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber, final boolean resolveName);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>, resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The referenced data set paths block read from the data set.
+     */
+    public String[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a block from an array (of rank 1) of object references from the data set
+     * <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>String[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths block read from the data set.
+     */
+    public String[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset, final boolean resolveName);
+
+    /**
+     * Reads an array (of rank N) of object references from the object <var>objectPath</var>,
+     * resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The multi-dimensional array of the paths of objects that the references refer to.
+     *         Each string may be empty, if the corresponding object reference refers to an unnamed
+     *         object.
+     */
+    public MDArray<String> readMDArray(final String objectPath);
+
+    /**
+     * Reads an array (of rank N) of object references from the object <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The multi-dimensional array of the paths of objects that the references refer to.
+     *         Each string may be empty, if the corresponding object reference refers to an unnamed
+     *         object.
+     */
+    public MDArray<String> readMDArray(final String objectPath, boolean resolveName);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>,
+     * resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The referenced data set paths block read from the data set.
+     */
+    public MDArray<String> readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths block read from the data set.
+     */
+    public MDArray<String> readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber, final boolean resolveName);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>,
+     * resolving the names of the objects. <br>
+     * <i>Note that resolving the name of the object is a time-consuming operation. If you don't
+     * need the name, but want to dereference the dataset, you don't need to resolve the name if the
+     * reader / writer is configured for auto-dereferencing (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}).</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The referenced data set paths block read from the data set.
+     */
+    public MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Reads a multi-dimensional array of object references from the data set <var>objectPath</var>. <br>
+     * <i>Note: if the reader has been configured to automatically resolve references (see
+     * {@link IHDF5ReaderConfigurator#noAutoDereference()}), a reference can be provided in all
+     * places where an object path is expected. This is considerably faster than resolving the
+     * name/path of the reference if the name/path by itself is not needed.</i>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param resolveName If <code>true</code>, resolves the names of the objects referenced,
+     *            otherwise returns the references themselves.
+     * @return The referenced data set paths block read from the data set.
+     */
+    public MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset, final boolean resolveName);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(final String dataSetPath);
+
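+    // A minimal iteration sketch, assuming "r" is an instance of this interface and
+    // that HDF5DataBlock offers getData()/getIndex() accessors (an assumption); the
+    // path is illustrative:
+    //
+    //   for (HDF5DataBlock<String[]> block : r.getArrayNaturalBlocks("/refs/array1d"))
+    //   {
+    //       String[] paths = block.getData(); // the referenced paths of this block
+    //       long blockIndex = block.getIndex();
+    //   }
+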
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(final String dataSetPath,
+            final boolean resolveName);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(
+            final String dataSetPath);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(
+            final String dataSetPath, final boolean resolveName);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ReferenceWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ReferenceWriter.java
new file mode 100644
index 0000000..586e12f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ReferenceWriter.java
@@ -0,0 +1,311 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * An interface for writing references. References can refer to objects or regions of datasets. This
+ * version only supports object references.
+ * <p>
+ * <b>Note:</b> References are a low-level feature and it is easy to get dangling or even wrong
+ * references by using them. If you have a choice, don't use them, but use links instead. If you
+ * have to use them, e.g. to comply with a pre-defined format definition, use them with care. The
+ * most important fact to know about references is that they don't keep an object alive. Once the
+ * last link to the object is gone, the object is gone as well. The reference will be
+ * <i>dangling</i>. If, at a later time, another object header is written to the same place in the
+ * file, the reference will refer to this new object, which is most likely an undesired effect
+ * (<i>wrong reference</i>). By default JHDF5 itself deletes existing datasets before writing new
+ * content to a dataset of the same name, which may lead to the described problem of dangling or
+ * wrong references without any explicit call to {@link IHDF5Writer#delete(String)}. Thus, HDF5
+ * files with references should always be opened for writing using the
+ * {@link IHDF5WriterConfigurator#keepDataSetsIfTheyExist()} setting.
+ * 
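+ * <p>
+ * A minimal write sketch, assuming <code>writer</code> is an {@link IHDF5Writer} opened
+ * with <code>keepDataSetsIfTheyExist()</code> and that the writer exposes this interface
+ * via a <code>reference()</code> handler (mirroring {@link IHDF5Reader#reference()}); the
+ * paths are illustrative:
+ * 
+ * <pre>
+ * // "/data/values" must have been written before the reference is created.
+ * writer.reference().write("/refs/toValues", "/data/values");
+ * writer.reference().setAttr("/refs/toValues", "alsoSee", "/data/values");
+ * </pre>
+ * 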
+ * @author Bernd Rinn
+ */
+public interface IHDF5ReferenceWriter extends IHDF5ReferenceReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets an object reference attribute to the referenced object.
+     * <p>
+     * Both the object referenced by <var>objectPath</var> and <var>referencedObjectPath</var>
+     * must exist, that is, they need to have been written beforehand by one of the
+     * <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param referencedObjectPath The path of the object to reference.
+     */
+    public void setAttr(final String objectPath, final String name,
+            final String referencedObjectPath);
+
+    /**
+     * Sets a 1D object reference array attribute to referenced objects.
+     * <p>
+     * Both the object referenced by <var>objectPath</var> and all
+     * <var>referencedObjectPaths</var> must exist, that is, they need to have been written
+     * beforehand by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param referencedObjectPaths The paths of the objects to reference.
+     */
+    public void setArrayAttr(final String objectPath, final String name,
+            final String[] referencedObjectPaths);
+
+    /**
+     * Sets an object reference array attribute to referenced objects.
+     * <p>
+     * Both the object referenced by <var>objectPath</var> and all
+     * <var>referencedObjectPaths</var> must exist, that is, they need to have been written
+     * beforehand by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param referencedObjectPaths The paths of the objects to reference.
+     */
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDArray<String> referencedObjectPaths);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes an object reference to the referenced object.
+     * <p>
+     * The object referenced by <var>referencedObjectPath</var> must exist, that is, it needs to
+     * have been written beforehand by one of the <code>write()</code> or <code>create()</code> methods.
+     * 
+     * @param objectPath The name of the object to write.
+     * @param referencedObjectPath The path of the object to reference.
+     */
+    public void write(String objectPath, String referencedObjectPath);
+
+    /**
+     * Writes an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPath The paths of the objects to reference.
+     */
+    public void writeArray(final String objectPath, final String[] referencedObjectPath);
+
+    /**
+     * Writes an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPath The paths of the objects to reference.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(final String objectPath, final String[] referencedObjectPath,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public void createArray(final String objectPath, final int size);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created, however data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(final String objectPath, final long size, final int blockSize);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5IntStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(final String objectPath, final int size,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an array (of rank 1) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created, however data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an array (of rank 1) of object references. The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, long, int, HDF5IntStorageFeatures)} call that was used to create
+     * the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the referenced objects to write. The length defines
+     *            the block size. Must not be <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(final String objectPath, final String[] referencedObjectPaths,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of an array (of rank 1) of object references. The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, long, int, HDF5IntStorageFeatures)} call that was used to create
+     * the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the referenced objects to write. The length defines
+     *            the block size. Must not be <code>null</code> or of length 0.
+     * @param dataSize The (real) number of elements of <var>referencedObjectPaths</var> to write
+     *            (needs to be <code><= referencedObjectPaths.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(final String objectPath,
+            final String[] referencedObjectPaths, final int dataSize, final long offset);
+
+    /**
+     * Writes an array (of rank N) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the objects to reference.
+     */
+    public void writeMDArray(final String objectPath, final MDArray<String> referencedObjectPaths);
+
+    /**
+     * Writes an array (of rank N) of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the objects to reference.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(final String objectPath, final MDArray<String> referencedObjectPaths,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array to create. This will be the total dimensions
+     *            for non-extendable data sets and the dimensions of one chunk (extent along each
+     *            axis) for extendable (chunked) data sets. For extendable data sets the initial
+     *            size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public void createMDArray(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array to create. This will be the total dimensions
+     *            for non-extendable data sets and the dimensions of one chunk (extent along each
+     *            axis) for extendable (chunked) data sets. For extendable data sets the initial
+     *            size of the array along each axis will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the object references to write. Must not be
+     *            <code>null</code>. All columns need to have the same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the object references to write. Must not be
+     *            <code>null</code>.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final MDArray<String> referencedObjectPaths, final long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional array of object references.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param referencedObjectPaths The paths of the object references to write. Must not be
+     *            <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>referencedObjectPaths</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final MDLongArray referencedObjectPaths, final int[] blockDimensions,
+            final long[] offset, final int[] memoryOffset);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ShortReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ShortReader.java
new file mode 100644
index 0000000..c024764
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ShortReader.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+
+/**
+ * An interface that provides methods for reading <code>short</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
+ * <p>
+ * <i>Note:</i> If the values read are unsigned, use the methods in {@link UnsignedIntUtils} to convert
+ * to a larger Java integer type that can hold all values as unsigned.
+ * 
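+ * <p>
+ * A minimal block-read sketch, assuming <code>reader</code> is an {@link IHDF5Reader};
+ * the path and block size are illustrative:
+ * 
+ * <pre>
+ * // Read the second 10000-element block of a rank-1 short array.
+ * short[] block = reader.int16().readArrayBlock("/some/path/dataset", 10000, 1L);
+ * </pre>
+ * 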
+ * @author Bernd Rinn
+ */
+public interface IHDF5ShortReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>short</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public short getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a <code>short[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public short[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MDShortArray getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads a <code>short</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public short[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>short</code> value from the data set <var>objectPath</var>. This method 
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public short read(String objectPath);
+
+    /**
+     * Reads a <code>short</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public short[] readArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath,
+            MDShortArray array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>short</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MDShortArray array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>short</code> array (of rank 1) from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>short[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public short[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
+
+    /**
+     * Reads a block from a <code>short</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>short[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public short[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads a <code>short</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public short[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>short</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public short[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>short</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public short[][] readMatrixBlockWithOffset(String objectPath, int blockSizeX,
+            int blockSizeY, long offsetX, long offsetY) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MDShortArray readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readMDArraySlice(String objectPath, IndexMap boundIndices);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readMDArraySlice(String objectPath, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>short</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>short</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MDShortArray readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<short[]>> getArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDShortArray>> getMDArrayNaturalBlocks(String dataSetPath);
+}
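
A minimal usage sketch of the block and slice reads declared above, assuming an existing file "test.h5" with a rank-1 short data set "/ds" and a rank-3 short data set "/volume" (file and path names are hypothetical), and assuming that IHDF5Reader's int16() accessor returns this interface:

import java.io.File;

import ch.systemsx.cisd.base.mdarray.MDShortArray;
import ch.systemsx.cisd.hdf5.HDF5DataBlock;
import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
import ch.systemsx.cisd.hdf5.IHDF5Reader;
import ch.systemsx.cisd.hdf5.IndexMap;

public class ShortReadSketch
{
    public static void main(String[] args)
    {
        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(new File("test.h5"));
        try
        {
            // Iterate over the natural (chunk-sized) blocks of the rank-1 data set.
            for (HDF5DataBlock<short[]> block : reader.int16().getArrayNaturalBlocks("/ds"))
            {
                System.out.println("block " + block.getIndex() + " at offset "
                        + block.getOffset() + ": " + block.getData().length + " values");
            }
            // Read the 2-dimensional slice of the rank-3 data set that binds index 0 to 5.
            MDShortArray slice =
                    reader.int16().readMDArraySlice("/volume", new IndexMap().mapTo(0, 5));
            System.out.println("slice rank: " + slice.dimensions().length);
        } finally
        {
            reader.close();
        }
    }
}

Reading natural blocks this way follows the chunk layout of the data set, so each iteration corresponds to one read access in the file.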
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5ShortWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5ShortWriter.java
new file mode 100644
index 0000000..b8550a1
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5ShortWriter.java
@@ -0,0 +1,586 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+
+/**
+ * An interface that provides methods for writing <code>short</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of 
+ * block access) to arrays. The performance of this block access can vary greatly depending on how 
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should 
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read 
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>   
+ * <i>Note:</i> If you need to convert from and to unsigned values, use the methods of 
+ * {@link UnsignedIntUtils}.
+ * 
+ * @author Bernd Rinn
+ */
+ // Note: As a trick to keep backward compatibility, this interface extends 
+ // IHDF5UnsignedShortWriter instead of IHDF5ShortReader, which it logically should.
+ // Once we remove IHDF5UnsignedShortWriter, uncomment the following line, remove
+ // all @Override annotations, and we are fine again.
+//public interface IHDF5ShortWriter extends IHDF5ShortReader
+@SuppressWarnings("deprecation")
+public interface IHDF5ShortWriter extends IHDF5UnsignedShortWriter
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>short</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setAttr(String objectPath, String name, short value);
+
+    /**
+     * Set a <code>short[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setArrayAttr(String objectPath, String name, short[] value);
+
+    /**
+     * Set a multi-dimensional <code>short</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMDArrayAttr(String objectPath, String name, MDShortArray value);
+
+    /**
+     * Set a <code>short[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    @Override
+    public void setMatrixAttr(String objectPath, String name, short[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>short</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    @Override
+    public void write(String objectPath, short value);
+
+    /**
+     * Writes out a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    @Override
+    public void writeArray(String objectPath, short[] data);
+
+    /**
+     * Writes out a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeArray(String objectPath, short[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    @Override
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), then no data 
+     *          set smaller than this size can be created, however data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>short</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), then no data 
+     *          set smaller than this size can be created, however data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *                <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>short</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    @Override
+    public void writeArrayBlock(String objectPath, short[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>short</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, short[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    @Override
+    public void writeArrayBlockWithOffset(String objectPath, short[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(String objectPath, short[][] data);
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMatrix(String objectPath, short[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the short matrix to create.
+     * @param sizeY The size of the y dimension of the short matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the short matrix to create.
+     * @param sizeY The size of the y dimension of the short matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    @Override
+    public void writeMatrixBlock(String objectPath, short[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, short[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, short[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, short[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    @Override
+    public void writeMatrixBlockWithOffset(String objectPath, short[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDShortArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void writeMDArray(String objectPath, MDShortArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>short</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDShortArray data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>short</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is 
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MDShortArray data, long[] boundIndices);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>short</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    @Override
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    @Override
+    public void writeMDArrayBlock(String objectPath, MDShortArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>short</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDShortArray data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>short</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MDShortArray data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set  to start writing to in each dimension.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>short</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>short</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    @Override
+    public void writeMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
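
A minimal sketch of the create-then-write-block pattern declared above, following the chunking advice from the interface note. The file name and data set path are made up for illustration, and IHDF5Writer's int16() accessor is assumed to return this interface:

import java.io.File;
import java.util.Arrays;

import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
import ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures;
import ch.systemsx.cisd.hdf5.IHDF5Writer;

public class ShortWriteSketch
{
    public static void main(String[] args)
    {
        IHDF5Writer writer = HDF5FactoryProvider.get().open(new File("test.h5"));
        try
        {
            final int blockSize = 1024;
            // Create a chunked data set whose chunk size equals the block size,
            // so each writeArrayBlock() call below writes exactly one chunk.
            writer.int16().createArray("/ds", 0L, blockSize,
                    HDF5IntStorageFeatures.INT_CHUNKED);
            final short[] block = new short[blockSize];
            for (long blockNumber = 0; blockNumber < 4; ++blockNumber)
            {
                Arrays.fill(block, (short) blockNumber);
                writer.int16().writeArrayBlock("/ds", block, blockNumber);
            }
        } finally
        {
            writer.close();
        }
    }
}

Because the block size matches the chunk size, each block lands as one consecutive run of values in the file, which is the layout the performance note above recommends.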
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5SimpleReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5SimpleReader.java
new file mode 100644
index 0000000..d7c8a73
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5SimpleReader.java
@@ -0,0 +1,349 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.Closeable;
+import java.util.BitSet;
+import java.util.Date;
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An HDF5 reader which contains only the basic methods. If you feel overwhelmed by all the methods
+ * of {@link IHDF5Reader}, then assign the reader to an {@link IHDF5SimpleReader} variable and let
+ * the code completion of your IDE help you find the method you are looking for.
+ * <p>
+ * Usage:
+ * 
+ * <pre>
+ * IHDF5SimpleReader reader = HDF5FactoryProvider.get().openForReading(new File("test.h5"));
+ * float[] f = reader.readFloatArray("/some/path/dataset");
+ * reader.close();
+ * </pre>
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5SimpleReader extends Closeable
+{
+
+    /**
+     * Closes this object and the file referenced by this object. This object must not be used after
+     * being closed.
+     */
+    @Override
+    public void close();
+
+    /**
+     * Returns <code>true</code> if <var>objectPath</var> exists and <code>false</code> otherwise.
+     */
+    public boolean exists(final String objectPath);
+
+    /**
+     * Returns <code>true</code> if the <var>objectPath</var> exists and represents a group and
+     * <code>false</code> otherwise.
+     */
+    public boolean isGroup(final String objectPath);
+
+    /**
+     * Returns the information about a data set as a {@link HDF5DataSetInformation} object. It is a
+     * failure condition if the <var>dataSetPath</var> does not exist or does not identify a data
+     * set.
+     * 
+     * @param dataSetPath The name (including path information) of the data set to return
+     *            information about.
+     */
+    public HDF5DataSetInformation getDataSetInformation(final String dataSetPath);
+
+    /**
+     * Returns the members of <var>groupPath</var>. The order is <i>not</i> well defined.
+     * 
+     * @param groupPath The path of the group to get the members for.
+     * @throws IllegalArgumentException If <var>groupPath</var> is not a group.
+     */
+    public List<String> getGroupMembers(final String groupPath);
+
+    /**
+     * Reads the data set <var>objectPath</var> as a byte array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public byte[] readAsByteArray(final String objectPath);
+
+    /**
+     * Reads a <code>Boolean</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a boolean type.
+     */
+    public boolean readBoolean(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a bit field (which can be considered the equivalent to a boolean array of rank 1) from
+     * the data set <var>objectPath</var> and returns it as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by
+     * {@link IHDF5LongWriter#writeArray(String, long[])} cannot be read back by this method but
+     * will throw a {@link HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The {@link BitSet} read from the data set.
+     * @throws HDF5DatatypeInterfaceException If the <var>objectPath</var> is not of bit field type.
+     */
+    public BitSet readBitField(final String objectPath) throws HDF5DatatypeInterfaceException;
+
+    /**
+     * Reads an <code>int</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public int readInt(final String objectPath);
+
+    /**
+     * Reads an <code>int</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public int[] readIntArray(final String objectPath);
+
+    /**
+     * Reads an <code>int</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public int[][] readIntMatrix(final String objectPath);
+
+    /**
+     * Reads a <code>long</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public long readLong(final String objectPath);
+
+    /**
+     * Reads a <code>long</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public long[] readLongArray(final String objectPath);
+
+    /**
+     * Reads a <code>long</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public long[][] readLongMatrix(final String objectPath);
+
+    /**
+     * Reads a <code>float</code> value from the data set <var>objectPath</var>. This method doesn't
+     * check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public float readFloat(final String objectPath);
+
+    /**
+     * Reads a <code>float</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public float[] readFloatArray(final String objectPath);
+
+    /**
+     * Reads a <code>float</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public float[][] readFloatMatrix(final String objectPath);
+
+    /**
+     * Reads a <code>double</code> value from the data set <var>objectPath</var>. This method
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public double readDouble(final String objectPath);
+
+    /**
+     * Reads a <code>double</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public double[] readDoubleArray(final String objectPath);
+
+    /**
+     * Reads a <code>double</code> matrix (array of arrays) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public double[][] readDoubleMatrix(final String objectPath);
+
+    /**
+     * Reads a date value from the data set <var>objectPath</var>. It needs to have been written by
+     * {@link IHDF5SimpleWriter#writeDate(String, Date)}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamp as {@link Date}.
+     * @throws HDF5JavaException If the <var>objectPath</var> does not denote a time stamp.
+     */
+    public Date readDate(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a date array (of rank 1) from the data set <var>objectPath</var>. It needs to have been
+     * written by {@link IHDF5SimpleWriter#writeDateArray(String, Date[])}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time stamps as a {@link Date} array.
+     * @throws HDF5JavaException If the <var>objectPath</var> does not denote a time stamp.
+     */
+    public Date[] readDateArray(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time duration value from the data set <var>objectPath</var>. It needs to have been
+     * written by {@link IHDF5SimpleWriter#writeTimeDuration(String, HDF5TimeDuration)}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time duration and its unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDuration readTimeDuration(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time duration array from the data set <var>objectPath</var>. It needs to have been
+     * written by {@link IHDF5SimpleWriter#writeTimeDurationArray(String, HDF5TimeDurationArray)}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time durations and their unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationArray readTimeDurationArray(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>String</code> from the data set <var>objectPath</var>. Considers '\0' as end of
+     * string. This needs to be a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String readString(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>String</code> array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public String[] readStringArray(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a compound from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> T readCompound(final String objectPath, final Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a compound array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param pojoClass The class to return the result in. Use {@link HDF5CompoundDataMap} to get it
+     *            in a map, {@link HDF5CompoundDataList} to get it in a list, and
+     *            <code>Object[]</code> to get it in an array, or use a pojo (Data Transfer Object),
+     *            in which case the compound members will be mapped to Java fields.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a compound data set.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> T[] readCompoundArray(final String objectPath, final Class<T> pojoClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type or if
+     *             <var>enumClass</var> is incompatible with the HDF5 enumeration type of
+     *             <var>objectPath</var>.
+     */
+    public <T extends Enum<T>> T readEnum(final String objectPath, Class<T> enumClass)
+            throws HDF5JavaException;
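+
+    // A minimal sketch (the Color enum and the path are hypothetical); the Java
+    // enum constants must be compatible with the values of the HDF5 enumeration:
+    //
+    //   enum Color { RED, GREEN, BLUE }
+    //   Color c = reader.readEnum("/pixel/color", Color.class);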
+
+    /**
+     * Reads an <code>Enum</code> value from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set as a String.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public String readEnumAsString(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> value array from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param enumClass the {@link Enum} class to represent the values of.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public <T extends Enum<T>> T[] readEnumArray(final String objectPath, Class<T> enumClass)
+            throws HDF5JavaException;
+
+    /**
+     * Reads an <code>Enum</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set as an array of Strings.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not an enum type.
+     */
+    public String[] readEnumArrayAsString(final String objectPath) throws HDF5JavaException;
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5SimpleWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5SimpleWriter.java
new file mode 100644
index 0000000..7a621e5
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5SimpleWriter.java
@@ -0,0 +1,312 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.BitSet;
+import java.util.Date;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An HDF5 writer which contains only the basic methods. If you feel overwhelmed with all the methods
+ * of {@link IHDF5Writer}, then assign the writer to a {@link IHDF5SimpleWriter} variable and let
+ * the code completion of your IDE help you find the method you are looking for.
+ * <p>
+ * Usage:
+ * 
+ * <pre>
+ * float[] f = new float[100];
+ * ...
+ * IHDF5SimpleWriter writer = HDF5FactoryProvider.get().open(new File("test.h5"));
+ * writer.writeFloatArray("/some/path/dataset", f);
+ * writer.close();
+ * </pre>
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5SimpleWriter extends IHDF5SimpleReader
+{
+
+    /**
+     * Removes an object from the file. If there is more than one link to the object, only the
+     * specified link will be removed.
+     */
+    public void delete(String objectPath);
+
+    /**
+     * Writes out a <code>boolean</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     */
+    public void writeBoolean(final String objectPath, final boolean value);
+
+    /**
+     * Writes out a bit field (which can be considered the equivalent of a boolean array of rank
+     * 1), provided as a Java {@link BitSet}.
+     * <p>
+     * Note that the storage form of the bit array is a <code>long[]</code>. However, it is marked
+     * in HDF5 to be interpreted bit-wise. Thus a data set written by this method cannot be read
+     * back by {@link #readLongArray(String)} but will throw a
+     * {@link ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeBitField(final String objectPath, final BitSet data);
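+
+    // A minimal round-trip sketch (the path "/flags" is hypothetical; a matching
+    // readBitField method on the reader side is assumed):
+    //
+    //   BitSet bits = new BitSet();
+    //   bits.set(3);
+    //   writer.writeBitField("/flags", bits);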
+
+    /**
+     * Writes out a <code>byte</code> array (of rank 1). Uses a compact storage layout. Should only
+     * be used for small data sets.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeByteArray(final String objectPath, final byte[] data);
+
+    /**
+     * Writes out an <code>int</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeInt(final String objectPath, final int value);
+
+    /**
+     * Writes out an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeIntArray(final String objectPath, final int[] data);
+
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeIntMatrix(final String objectPath, final int[][] data);
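+
+    // A minimal sketch: the matrix must be rectangular, i.e. every row must have
+    // the same length (the path "/matrix" is hypothetical):
+    //
+    //   int[][] m = new int[][] { { 1, 2, 3 }, { 4, 5, 6 } };
+    //   writer.writeIntMatrix("/matrix", m);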
+
+    /**
+     * Writes out a <code>long</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeLong(final String objectPath, final long value);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeLongArray(final String objectPath, final long[] data);
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeLongMatrix(final String objectPath, final long[][] data);
+
+    /**
+     * Writes out a <code>float</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeFloat(final String objectPath, final float value);
+
+    /**
+     * Writes out a <code>float</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeFloatArray(final String objectPath, final float[] data);
+
+    /**
+     * Writes out a <code>float</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeFloatMatrix(final String objectPath, final float[][] data);
+
+    /**
+     * Writes out a <code>double</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void writeDouble(final String objectPath, final double value);
+
+    /**
+     * Writes out a <code>double</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeDoubleArray(final String objectPath, final double[] data);
+
+    /**
+     * Writes out a <code>double</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeDoubleMatrix(final String objectPath, final double[][] data);
+
+    /**
+     * Writes out a time stamp value provided as a {@link Date}. The data set will be tagged as type
+     * variant {@link HDF5DataTypeVariant#TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH}.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param date The date to write.
+     */
+    public void writeDate(final String objectPath, final Date date);
+
+    /**
+     * Writes out a {@link Date} array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dates The dates to write.
+     */
+    public void writeDateArray(final String objectPath, final Date[] dates);
+
+    /**
+     * Writes out a time duration value.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDuration The duration of time to write.
+     */
+    public void writeTimeDuration(final String objectPath, final HDF5TimeDuration timeDuration);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the
+     * corresponding type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write.
+     */
+    public void writeTimeDurationArray(final String objectPath,
+            final HDF5TimeDurationArray timeDurations);
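+
+    // A hedged sketch, assuming an HDF5TimeDuration(long, HDF5TimeUnit) constructor
+    // and HDF5TimeUnit.SECONDS as elsewhere in this API (the path is hypothetical):
+    //
+    //   writer.writeTimeDuration("/elapsed",
+    //           new HDF5TimeDuration(42L, HDF5TimeUnit.SECONDS));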
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of the <var>data</var>.
+     * @deprecated Use {@link #writeString(String, String)} instead.
+     */
+    @Deprecated
+    public void writeString(final String objectPath, final String data, final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length (which is the length of
+     * <var>data</var>).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeString(final String objectPath, final String data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any element in <var>data</var>.
+     * @deprecated Use {@link #writeStringArray(String, String[])} instead.
+     */
+    @Deprecated
+    public void writeStringArray(final String objectPath, final String[] data, final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is given by the longest element.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeStringArray(final String objectPath, final String[] data);
+
+    /**
+     * Writes out a compound value. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or
+     *            <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeCompound(String objectPath, T data);
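+
+    // A minimal sketch using the map container so that no pojo class is needed
+    // (the path and member names are hypothetical; HDF5CompoundDataMap is assumed
+    // to behave as a java.util.Map):
+    //
+    //   HDF5CompoundDataMap map = new HDF5CompoundDataMap();
+    //   map.put("temperature", 21.5f);
+    //   map.put("label", "probe-1");
+    //   writer.writeCompound("/run1", map);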
+
+    /**
+     * Writes out an array (of rank 1) of compound values. The type is inferred based on the values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The value of the data set. May be a pojo (Data Transfer Object), a
+     *            {@link HDF5CompoundDataMap}, {@link HDF5CompoundDataList} or
+     *            <code>Object[]</code>.
+     * @see CompoundType
+     * @see CompoundElement
+     */
+    public <T> void writeCompoundArray(final String objectPath, final T[] data);
+
+    /**
+     * Writes out an enum value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value of the data set.
+     * @throws HDF5JavaException If the enum type of <var>value</var> is not a type of this file.
+     */
+    public <T extends Enum<T>> void writeEnum(final String objectPath, final Enum<T> value)
+            throws HDF5JavaException;
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     */
+    public <T extends Enum<T>> void writeEnumArray(String objectPath, Enum<T>[] data);
+
+    /**
+     * Writes out an array of enum values.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param options The allowed values of the enumeration type.
+     * @param data The data to write.
+     */
+    public void writeEnumArray(String objectPath, String[] options, String[] data);
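+
+    // A minimal sketch: the options define the enumeration type and every data
+    // element must be one of them (the path is hypothetical):
+    //
+    //   writer.writeEnumArray("/grades", new String[] { "A", "B", "C" },
+    //           new String[] { "A", "C", "A" });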
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5StringReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5StringReader.java
new file mode 100644
index 0000000..4aea91a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5StringReader.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+
+/**
+ * An interface that provides methods for reading <code>String</code> values from HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5StringReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a string attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public String getAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a string attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. Does not consider '\0' as end of string but reads the full length of
+     * the attribute.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public String getAttrRaw(final String objectPath, final String attributeName);
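+
+    // A contrast sketch: getAttr truncates at the first '\0', while getAttrRaw
+    // returns the full stored length (object path and attribute name hypothetical):
+    //
+    //   String s = reader.getAttr("/dataset", "comment");
+    //   String raw = reader.getAttrRaw("/dataset", "comment");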
+
+    /**
+     * Reads a string array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public String[] getArrayAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a string array attribute named <var>attributeName</var> from the object
+     * <var>objectPath</var>. Does not consider '\0' as end of string but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public String[] getArrayAttrRaw(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional string array attribute named <var>attributeName</var> from the
+     * object <var>objectPath</var>. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public MDArray<String> getMDArrayAttr(final String objectPath, final String attributeName);
+
+    /**
+     * Reads a multi-dimensional string array attribute named <var>attributeName</var> from the
+     * object <var>objectPath</var>. Does not consider '\0' as end of string but reads the full
+     * length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public MDArray<String> getMDArrayAttrRaw(final String objectPath, final String attributeName);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a string from the data set <var>objectPath</var>. Considers '\0' as end of string. This
+     * needs to be a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String read(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>String</code> from the data set <var>objectPath</var>. Does not consider '\0'
+     * as end of string but reads the full length of the string. This needs to be a string type.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String readRaw(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a string array (of rank 1) from the data set <var>objectPath</var>. The elements of
+     * this data set need to be a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readArray(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a string array (of rank 1) from the data set <var>objectPath</var>. The elements of
+     * this data set need to be a string type. Does not consider '\0' as end of string but reads the
+     * full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readArrayRaw(final String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a string array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block to read from the data set.
+     * @param blockNumber The number of the block to read from the data set (the offset is
+     *            <code>blockSize * blockNumber</code>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber);
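+
+    // A minimal sketch: with blockSize 10, blockNumber 2 reads elements 20..29,
+    // since the offset is blockSize * blockNumber (the path is hypothetical):
+    //
+    //   String[] block = reader.readArrayBlock("/names", 10, 2L);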
+
+    /**
+     * Reads a block of a string array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Does not consider '\0' as end of string
+     * but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block to read from the data set.
+     * @param blockNumber The number of the block to read from the data set (the offset is
+     *            <code>blockSize * blockNumber</code>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readArrayBlockRaw(final String objectPath, final int blockSize,
+            final long blockNumber);
+
+    /**
+     * Reads a block of a string array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block to read from the data set.
+     * @param offset The offset of the block in the data set.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a block of a string array (of rank 1) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Does not consider '\0' as end of string
+     * but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The size of the block to read from the data set.
+     * @param offset The offset of the block in the data set.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public String[] readArrayBlockWithOffsetRaw(final String objectPath, final int blockSize,
+            final long offset);
+
+    /**
+     * Reads a string array (of rank N) from the data set <var>objectPath</var>. The elements of
+     * this data set need to be a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public MDArray<String> readMDArray(final String objectPath);
+
+    /**
+     * Reads a string array (of rank N) from the data set <var>objectPath</var>. The elements of
+     * this data set need to be a string type. Does not consider '\0' as end of string but reads
+     * the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public MDArray<String> readMDArrayRaw(final String objectPath);
+
+    /**
+     * Reads a block of a string array (of rank N) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The dimensions (along each axis) of the block to read from the data
+     *            set.
+     * @param blockNumber The number of the block to read from the data set (the offset in each
+     *            dimension i is <code>blockDimensions[i] * blockNumber[i]</code>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public MDArray<String> readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber);
+
+    /**
+     * Reads a block of a string array (of rank N) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Does not consider '\0' as end of string
+     * but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The dimensions (along each axis) of the block to read from the data
+     *            set.
+     * @param blockNumber The number of the block to read from the data set (the offset in each
+     *            dimension i is <code>blockDimensions[i] * blockNumber[i]</code>).
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public MDArray<String> readMDArrayBlockRaw(final String objectPath,
+            final int[] blockDimensions, final long[] blockNumber);
+
+    /**
+     * Reads a block of a string array (of rank N) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The dimensions (along each axis) of the block to read from the data
+     *            set.
+     * @param offset The offset of the block in the data set.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public MDArray<String> readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Reads a block of a string array (of rank N) from the data set <var>objectPath</var>. The
+     * elements of this data set need to be a string type. Does not consider '\0' as end of string
+     * but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The dimensions (along each axis) of the block to read from the data
+     *            set.
+     * @param offset The offset of the block in the data set.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not a string type.
+     */
+    public MDArray<String> readMDArrayBlockWithOffsetRaw(final String objectPath,
+            final int[] blockDimensions, final long[] offset);
+
+    /**
+     * Provides all natural blocks of this one-dimensional string data set to iterate over.
+     * Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocks(final String objectPath)
+            throws HDF5JavaException;
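+
+    // A hedged iteration sketch (the getData()/getIndex() accessors on
+    // HDF5DataBlock are assumed; the path is hypothetical):
+    //
+    //   for (HDF5DataBlock<String[]> block : reader.getArrayNaturalBlocks("/names"))
+    //   {
+    //       String[] chunk = block.getData();
+    //       long index = block.getIndex();
+    //   }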
+
+    /**
+     * Provides all natural blocks of this one-dimensional string data set to iterate over. Does not
+     * consider '\0' as end of string but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<String[]>> getArrayNaturalBlocksRaw(final String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional string data set to iterate over.
+     * Considers '\0' as end of string.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocks(
+            final String objectPath);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional string data set to iterate over. Does
+     * not consider '\0' as end of string but reads the full length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MDArray<String>>> getMDArrayNaturalBlocksRaw(
+            final String objectPath);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5StringWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5StringWriter.java
new file mode 100644
index 0000000..c12f419
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5StringWriter.java
@@ -0,0 +1,593 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+
+/**
+ * An interface that provides methods for writing <code>String</code> values to HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5StringWriter extends IHDF5StringReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Sets a string attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(final String objectPath, final String name, final String value);
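+
+    // A minimal sketch: the data set must exist before an attribute is set on it
+    // (the path, attribute name and values are hypothetical):
+    //
+    //   writer.write("/dataset", "payload");
+    //   writer.setAttr("/dataset", "unit", "meters");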
+
+    /**
+     * Sets a string attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @param maxLength The maximal length of the value.
+     */
+    public void setAttr(final String objectPath, final String name, final String value,
+            final int maxLength);
+
+    /**
+     * Sets a string array attribute on the referenced object. The length of the array is taken to
+     * be the longest string in <var>value</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(final String objectPath, final String name, final String[] value);
+
+    /**
+     * Sets a string array attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @param maxLength The maximal length of any element in <var>value</var>.
+     */
+    public void setArrayAttr(final String objectPath, final String name, final String[] value,
+            final int maxLength);
+
+    /**
+     * Sets a multi-dimensional string array attribute on the referenced object. The length of the
+     * array is taken to be the longest string in <var>value</var>.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDArray<String> value);
+
+    /**
+     * Sets a multi-dimensional string array attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     * @param maxLength The maximal length of the value.
+     */
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MDArray<String> value, final int maxLength);
+
+    /**
+     * Sets a string attribute with variable length on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttrVL(final String objectPath, final String name, final String value);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of the <var>data</var>.
+     */
+    public void write(final String objectPath, final String data, final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length (which is the length of the
+     * string <var>data</var>).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void write(final String objectPath, final String data);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length (which is the length of
+     * <var>data</var>).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void write(final String objectPath, final String data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> with a fixed maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of the <var>data</var>.
+     * @param features The storage features of the data set.
+     */
+    public void write(final String objectPath, final String data, final int maxLength,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(final String objectPath, final String[] data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is defined by the longest string in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(final String objectPath, final String[] data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     */
+    public void writeArray(final String objectPath, final String[] data, final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> array (of rank 1). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(final String objectPath, final String[] data, final int maxLength,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is defined by the longest string in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeMDArray(final String objectPath, final MDArray<String> data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is defined by the longest string in <var>data</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(final String objectPath, final MDArray<String> data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     */
+    public void writeMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * fixed maximal length which is given by <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param maxLength The maximal length of any of the strings in <var>data</var>.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(final String objectPath, final MDArray<String> data,
+            final int maxLength, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the String array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public void createArray(final String objectPath, final int maxLength, final int size);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the String array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(final String objectPath, final int maxLength, final int size,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String</code> array (of rank 1) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param size The size of the String array to create. When using extendable data sets (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @param features The storage features of the data set.
+     */
+    public void createArray(final String objectPath, final int maxLength, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank 1). The data set needs to have
+     * been created by
+     * {@link #createArray(String, int, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, int, long, int, HDF5GenericStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(final String objectPath, final String[] data,
+            final long blockNumber);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank 1). The data set needs to have
+     * been created by
+     * {@link #createArray(String, int, long, int, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, String[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, int, long, int, HDF5GenericStorageFeatures)} call that was
+     * used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(final String objectPath, final String[] data,
+            final int dataSize, final long offset);
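+
+    // A sketch of block-wise writing where the total size (25) is not a multiple of
+    // the block size (10): the final partial block goes through the offset variant.
+    // The path and the String[] variables firstTen, secondTen and lastFive are
+    // hypothetical:
+    //
+    //   writer.createArray("/names", 64, 25L, 10);
+    //   writer.writeArrayBlock("/names", firstTen, 0L);   // elements 0..9
+    //   writer.writeArrayBlock("/names", secondTen, 1L);  // elements 10..19
+    //   writer.writeArrayBlockWithOffset("/names", lastFive, 5, 20L);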
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The size of the String array to create. When using extendable data sets
+     *            (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     */
+    public void createMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions);
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The size of the String array to create. When using extendable data sets
+     *            (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block in each dimension (for block-wise IO). Ignored if no
+     *            extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize);
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The size of the String array to create. When using extendable data sets
+     *            (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(final String objectPath, final int maxLength,
+            final int[] dimensions, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String</code> array (of rank N) for Strings of length <var>maxLength</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param maxLength The maximal length of one String in the array.
+     * @param dimensions The size of the String array to create. When using extendable data sets
+     *            (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data set
+     *            smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block in each dimension (for block-wise IO). Ignored if no
+     *            extendable data sets are used (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(final String objectPath, final int maxLength,
+            final long[] dimensions, final int[] blockSize,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank N). The data set needs to have
+     * been created by
+     * {@link #createMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The dimensions define the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write in each dimension.
+     */
+    public void writeMDArrayBlock(final String objectPath, final MDArray<String> data,
+            final long[] blockNumber);
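+
+    // A rank-2 sketch: with block dimensions {2, 3}, block number {1, 0} starts at
+    // offset {2, 0}. The path and the MDArray<String> variable block2x3 (with
+    // dimensions {2, 3}) are hypothetical:
+    //
+    //   writer.createMDArray("/grid", 32, new long[] { 4, 6 }, new int[] { 2, 3 });
+    //   writer.writeMDArrayBlock("/grid", block2x3, new long[] { 1, 0 });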
+
+    /**
+     * Writes out a block of a <code>String</code> array (of rank N). The data set needs to have
+     * been created by
+     * {@link #createMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)}
+     * beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createMDArray(String, int, long[], int[], HDF5GenericStorageFeatures)} call
+     * that was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The dimensions define the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeMDArrayBlockWithOffset(final String objectPath,
+            final MDArray<String> data, final long[] offset);
+
+    /**
+     * Writes out a <code>String</code> with variable maximal length.
+     * <p>
+     * The advantage of this method over {@link #write(String, String)} is that when writing a
+     * new string later it can have a different (also greater) length. The disadvantage is that
+     * it is more time-consuming to read and write this kind of string and that it can't be
+     * compressed.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeVL(final String objectPath, final String data);
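+
+    // A minimal sketch: a variable-length string can later be overwritten with a
+    // longer value (the path is hypothetical):
+    //
+    //   writer.writeVL("/comment", "short");
+    //   writer.writeVL("/comment", "a considerably longer replacement");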
+
+    /**
+     * Writes out a <code>String[]</code> where each String of the array has a variable maximal
+     * length.
+     * <p>
+     * The advantage of this method over {@link #writeArray(String, String[])} is that when
+     * writing a new string later it can have a different (also greater) length. The disadvantage is
+     * that it is more time-consuming to read and write this kind of string and that it can't be
+     * compressed.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArrayVL(final String objectPath, final String[] data);
+
+    /**
+     * Writes out a <code>String[]</code> where each String of the array has a variable maximal
+     * length.
+     * <p>
+     * The advantage of this method over {@link #writeArray(String, String[])} is that when
+     * writing a new string later it can have a different (also greater) length. The disadvantage is
+     * that it is more time-consuming to read and write this kind of string and that it can't be
+     * compressed.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArrayVL(final String objectPath, final String[] data,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     */
+    public void createArrayVL(final String objectPath, final int size);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The initial size of the array.
+     * @param blockSize The size of one block in the array.
+     */
+    public void createArrayVL(final String objectPath, final long size,
+            final int blockSize);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The initial size of the array.
+     * @param blockSize The size of one block in the array.
+     * @param features The storage features of the data set.
+     */
+    public void createArrayVL(final String objectPath, final long size,
+            final int blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a <code>String[]</code> where each String of the array has a variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param features The storage features of the data set.
+     */
+    public void createArrayVL(final String objectPath, final int size,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArrayVL(final String objectPath, final int[] dimensions,
+            final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     */
+    public void createMDArrayVL(final String objectPath, final int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @param blockSize The size of a contiguously stored block (along each axis) in the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArrayVL(final String objectPath, final long[] dimensions,
+            final int[] blockSize, final HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>String</code> array where each String of the array has a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The initial dimensions (along each axis) of the array.
+     * @param blockSize The size of a contiguously stored block (along each axis) in the array.
+     */
+    public void createMDArrayVL(final String objectPath, final long[] dimensions,
+            final int[] blockSize);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeMDArrayVL(final String objectPath, final MDArray<String> data);
+
+    /**
+     * Writes out a <code>String</code> array (of rank N). Each element of the array will have a
+     * variable maximal length.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArrayVL(final String objectPath,
+            final MDArray<String> data, final HDF5GenericStorageFeatures features);
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5TimeDurationReader.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5TimeDurationReader.java
new file mode 100644
index 0000000..1ed462a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5TimeDurationReader.java
@@ -0,0 +1,268 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * An interface that provides methods for reading time duration values from HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5TimeDurationReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if the attribute <var>attributeName</var> of data set
+     * <var>objectPath</var> is a time duration and <code>false</code> otherwise.
+     */
+    public boolean isTimeDuration(String objectPath, String attributeName) throws HDF5JavaException;
+
+    /**
+     * Returns the time unit, if the attribute given by <var>attributeName</var> of object
+     * <var>objectPath</var> is a time duration and <code>null</code> otherwise.
+     */
+    public HDF5TimeUnit tryGetTimeUnit(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a time duration attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time duration.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDuration getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a time duration array attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The time duration array.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationArray getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional time duration array attribute named <var>attributeName</var> from the
+     * data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The multi-dimensional time duration array.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationMDArray getMDArrayAttr(String objectPath, String attributeName);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Returns <code>true</code>, if the data set given by <var>objectPath</var> is a time duration
+     * and <code>false</code> otherwise.
+     */
+    public boolean isTimeDuration(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Returns the time unit, if the data set given by <var>objectPath</var> is a time duration and
+     * <code>null</code> otherwise.
+     */
+    public HDF5TimeUnit tryGetTimeUnit(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time duration value and its unit from the data set <var>objectPath</var>. It needs to
+     * be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, HDF5TimeDuration)} or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)}.
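+     * <p>
+     * A minimal reading sketch (hypothetical path; <code>durationReader</code> stands for an
+     * instance of this interface):
+     * 
+     * <pre>
+     * HDF5TimeDuration d = durationReader.read("/experiment/runtime");
+     * System.out.println(d.getValue() + " " + d.getUnit());
+     * </pre>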
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time duration and its unit.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDuration read(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a time duration array from the data set <var>objectPath</var>. It needs to be tagged as
+     * one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, HDF5TimeDuration)} or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.addTypeVariant("/dataSetPath", HDF5TimeUnit.SECONDS.getTypeVariant());
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The time duration array.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationArray readArray(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * It needs to be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, HDF5TimeDuration)} or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.addTypeVariant("/dataSetPath", HDF5TimeUnit.SECONDS.getTypeVariant());
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be
+     *         <code>min(size - blockSize*blockNumber,
+     *         blockSize)</code>.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationArray readArrayBlock(String objectPath, int blockSize, long blockNumber)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a block of a time duration array (of rank 1) from the data set <var>objectPath</var>.
+     * It needs to be tagged as one of the type variants that indicate a time duration, for example
+     * {@link HDF5DataTypeVariant#TIME_DURATION_SECONDS}.
+     * <p>
+     * This tagging is done by the writer when using
+     * {@link IHDF5Writer#writeTimeDuration(String, HDF5TimeDuration)} or can be done by calling
+     * {@link IHDF5ObjectReadWriteInfoProviderHandler#setTypeVariant(String, HDF5DataTypeVariant)},
+     * most conveniently by code like
+     * 
+     * <pre>
+     * writer.addTypeVariant("/dataSetPath", HDF5TimeUnit.SECONDS.getTypeVariant());
+     * </pre>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>long[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with
+     *            0).
+     * @return The data block read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationArray readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset) throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set of time durations to iterate
+     * over.
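+     * <p>
+     * Typical iteration (hypothetical path; <code>process()</code> is a placeholder for user
+     * code):
+     * 
+     * <pre>
+     * for (HDF5DataBlock&lt;HDF5TimeDurationArray&gt; block : durationReader.getArrayNaturalBlocks("/ds"))
+     * {
+     *     process(block.getData(), block.getOffset());
+     * }
+     * </pre>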
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of a time duration data type or not of rank
+     *             1.
+     */
+    public Iterable<HDF5DataBlock<HDF5TimeDurationArray>> getArrayNaturalBlocks(String objectPath)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional array of time durations from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     * @throws HDF5JavaException If the <var>objectPath</var> is not tagged as a type variant that
+     *             corresponds to a time duration.
+     */
+    public HDF5TimeDurationMDArray readMDArray(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional array of time durations from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     */
+    public HDF5TimeDurationMDArray readMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber) throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional array of time durations from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public HDF5TimeDurationMDArray readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+
+    /**
+     * Reads a multi-dimensional array of time durations from the data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath, HDF5TimeDurationMDArray array,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>long</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath, HDF5TimeDurationMDArray array,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<HDF5TimeDurationMDArray>> getMDArrayNaturalBlocks(
+            String dataSetPath);
+
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5TimeDurationWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5TimeDurationWriter.java
new file mode 100644
index 0000000..cf90ea3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5TimeDurationWriter.java
@@ -0,0 +1,356 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An interface that provides methods for writing time duration values to HDF5 files.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5TimeDurationWriter extends IHDF5TimeDurationReader
+{
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a time duration value as attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
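+     * <p>
+     * For example (illustrative names; <code>durationWriter</code> stands for an instance of
+     * this interface):
+     * 
+     * <pre>
+     * durationWriter.setAttr("/ds", "timeout", 30, HDF5TimeUnit.SECONDS);
+     * </pre>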
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDuration The value of the attribute.
+     * @param timeUnit The unit of the attribute.
+     */
+    public void setAttr(String objectPath, String attributeName,
+            long timeDuration, HDF5TimeUnit timeUnit);
+
+    /**
+     * Set a time duration value as attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDuration The value of the attribute.
+     */
+    public void setAttr(String objectPath, String attributeName,
+            HDF5TimeDuration timeDuration);
+
+    /**
+     * Set a time duration array value as attribute on the referenced object. The smallest time unit
+     * in <var>timeDurations</var> will be used as the time unit of the array.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * <p>
+     * <em>Note: Time durations are stored as a <code>long[]</code> array.</em>
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDurations The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String attributeName,
+            HDF5TimeDurationArray timeDurations);
+
+    /**
+     * Set a multi-dimensional time duration array value as attribute on the referenced object. The
+     * smallest time unit in <var>timeDurations</var> will be used as the time unit of the array.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * <p>
+     * <em>Note: Time durations are stored as a <code>long[]</code> array.</em>
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param attributeName The name of the attribute.
+     * @param timeDurations The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String attributeName,
+            HDF5TimeDurationMDArray timeDurations);
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a time duration value.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDuration The duration of time to write in the given <var>timeUnit</var>.
+     * @param timeUnit The unit of the time duration.
+     */
+    public void write(String objectPath, long timeDuration, HDF5TimeUnit timeUnit);
+
+    /**
+     * Writes out a time duration value.
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDuration The duration of time to write.
+     */
+    public void write(String objectPath, HDF5TimeDuration timeDuration);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long</code> values.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for
+     *            non-extendable data sets and the size of one chunk for extendable (chunked) data
+     *            sets. For extendable data sets the initial size of the array will be 0, see
+     *            {@link ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator#dontUseExtendableDataTypes}.
+     * @param timeUnit The unit of the time duration.
+     */
+    public void createArray(String objectPath, int size, HDF5TimeUnit timeUnit);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and no compression is used.
+     * @param timeUnit The unit of the time duration.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5TimeUnit timeUnit);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the data set to create.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *            sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()})
+     *            and no compression is used.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5TimeUnit timeUnit, HDF5GenericStorageFeatures features);
+
+    /**
+     * Creates a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the array to create. This will be the total size for non-extendable
+     *            data sets and the size of one chunk for extendable (chunked) data sets. For
+     *            extendable data sets the initial size of the array will be 0, see
+     *            {@link HDF5GenericStorageFeatures}.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size, HDF5TimeUnit timeUnit,
+            HDF5GenericStorageFeatures features);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write.
+     */
+    public void writeArray(String objectPath, HDF5TimeDurationArray timeDurations);
+
+    /**
+     * Writes out a time duration array (of rank 1).
+     * <p>
+     * <em>Note: Time durations are stored as <code>long[]</code> arrays and tagged with the corresponding
+     * type variant.</em>
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param timeDurations The time durations to write.
+     * @param features The storage features used to store the array.
+     */
+    public void writeArray(String objectPath, HDF5TimeDurationArray timeDurations,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a time duration array. The data set needs to have been created by
+     * {@link #createArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * <i>Note:</i> For best performance, the block size in this method should be chosen to be equal
+     * to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)} call that
+     * was used to create the data set.
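+     * <p>
+     * A minimal sketch (illustrative path and sizes; the <code>HDF5TimeDurationArray</code>
+     * constructor taking a <code>long[]</code> and a time unit is assumed):
+     * 
+     * <pre>
+     * durationWriter.createArray("/ds", 100000L, 1000, HDF5TimeUnit.SECONDS,
+     *         HDF5GenericStorageFeatures.GENERIC_CHUNKED);
+     * durationWriter.writeArrayBlock("/ds", new HDF5TimeDurationArray(new long[1000],
+     *         HDF5TimeUnit.SECONDS), 0);
+     * </pre>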
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, HDF5TimeDurationArray data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a time duration array. The data set needs to have been created by
+     * {@link #createArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, HDF5TimeDurationArray, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * <p>
+     * <i>Note:</i> For best performance, the typical <var>dataSize</var> in this method should be
+     * chosen to be equal to the <var>blockSize</var> argument of the
+     * {@link #createArray(String, long, int, HDF5TimeUnit, HDF5GenericStorageFeatures)} call that
+     * was used to create the data set.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath,
+            HDF5TimeDurationArray data, int dataSize, long offset);
+
+    /**
+     * Writes out a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, HDF5TimeDurationMDArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, HDF5TimeDurationMDArray data);
+
+    /**
+     * Creates a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial
+     *            dimensions and the dimensions of a chunk of the array will be
+     *            <var>dimensions</var>. When the writer is configured to <i>enforce</i> a
+     *            non-extendable data set, the initial dimensions equal the dimensions and will be
+     *            <var>dimensions</var>.
+     * @param timeUnit The unit of the time duration.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5TimeUnit timeUnit);
+
+    /**
+     * Creates a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param timeUnit The unit of the time duration.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5TimeUnit timeUnit);
+
+    /**
+     * Creates a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>long</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a
+     *            non-extendable data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals
+     *            the total size and will be <var>dimensions</var>.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5TimeUnit timeUnit, HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param timeUnit The unit of the time duration.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5TimeUnit timeUnit,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, HDF5TimeDurationMDArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath,
+            HDF5TimeDurationMDArray data, long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional array of time durations.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath,
+            HDF5TimeDurationMDArray data, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedByteWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedByteWriter.java
new file mode 100644
index 0000000..c7fb7d0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedByteWriter.java
@@ -0,0 +1,447 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+
+/**
+ * An interface that provides methods for writing unsigned <code>byte</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions
+ * should be chosen to be equal to the chunk dimensions of the array, as in this case the blocks
+ * written / read are stored as consecutive values in the HDF5 file and one write / read access
+ * will suffice.
+ * <p>
+ * <i>Note:</i> Use the methods in {@link UnsignedIntUtils} to convert from and to unsigned values.
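+ * <p>
+ * For example, writing the unsigned value 200 as a <code>byte</code> (illustrative path and
+ * writer instance; the exact conversion helper name is an assumption):
+ * 
+ * <pre>
+ * uint8Writer.write("/counter", UnsignedIntUtils.toInt8(200));
+ * </pre>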
+ * 
+ * @deprecated Use {@link IHDF5ByteWriter} instead, as it has all methods of this interface.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5UnsignedByteWriter extends IHDF5ByteReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>byte</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, byte value);
+
+    /**
+     * Set a <code>byte[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, byte[] value);
+
+    /**
+     * Set a multi-dimensional <code>byte</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MDByteArray value);
+
+    /**
+     * Set a <code>byte[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, byte[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>byte</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, byte value);
+
+    /**
+     * Writes out a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, byte[] data);
+
+    /**
+     * Writes out a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, byte[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>byte</code> array to create. When <i>requesting</i> a
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}),
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>.
+     *            When <i>allowing</i> a chunked data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a
+     *            non-extendable data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>byte</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the byte array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *                <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>byte</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, byte[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>byte</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, byte[], long)} if the
+     * total size of the data set is not a multiple of the block size.
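+     * <p>
+     * For example, writing a trailing partial block of 500 values at offset 2000 (illustrative
+     * sizes, assuming a block size of 1000; <code>writer</code> stands for an instance of this
+     * interface):
+     * 
+     * <pre>
+     * writer.writeArrayBlockWithOffset("/ds", new byte[1000], 500, 2000L);
+     * </pre>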
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, byte[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, byte[][] data);
+
+    /**
+     * Writes out a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, byte[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the byte matrix to create.
+     * @param sizeY The size of the y dimension of the byte matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>byte</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the byte matrix to create.
+     * @param sizeY The size of the y dimension of the byte matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, byte[][], long, long)} instead of this
+     * method if the total size of the data set is not a multiple of the block size.
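+     * <p>
+     * For example (illustrative sizes; <code>writer</code> stands for an instance of this
+     * interface):
+     * 
+     * <pre>
+     * writer.createMatrix("/m", 100L, 100L, 10, 10, HDF5IntStorageFeatures.INT_CHUNKED);
+     * writer.writeMatrixBlock("/m", new byte[10][10], 3, 4);
+     * </pre>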
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, byte[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, byte[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, byte[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>byte</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, byte[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, byte[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MDByteArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDByteArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>byte</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g.
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDByteArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>byte</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDByteArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
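For illustration, a minimal sketch of block-wise writing of a multi-dimensional
byte array through the interface above. It assumes HDF5Factory.open(...) and the
uint8() accessor of IHDF5Writer as found in recent JHDF5 releases; the file name,
data set path and sizes are made up.

    import ch.systemsx.cisd.base.mdarray.MDByteArray;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class MDByteBlockSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("bytes.h5");
            // A 4x6 data set stored in 2x3 chunks. Writing blocks whose dimensions
            // match the chunk dimensions is the fast path described in the note above.
            writer.uint8().createMDArray("/image", new long[] { 4, 6 }, new int[] { 2, 3 });
            final MDByteArray block = new MDByteArray(new int[] { 2, 3 });
            final byte[] flat = block.getAsFlatArray();
            for (int i = 0; i < flat.length; ++i)
            {
                flat[i] = (byte) i;
            }
            // Block coordinates (1, 1) correspond to element offset (2, 3).
            writer.uint8().writeMDArrayBlock("/image", block, new long[] { 1, 1 });
            writer.close();
        }
    }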
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedIntWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedIntWriter.java
new file mode 100644
index 0000000..3d37828
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedIntWriter.java
@@ -0,0 +1,447 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+
+/**
+ * An interface that provides methods for writing unsigned <code>int</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>
+ * <i>Note:</i> Use the methods in {@link UnsignedIntUtils} to convert from and to unsigned values.
+ * 
+ * @deprecated Use {@link IHDF5IntWriter} instead, as it has all methods of this interface.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5UnsignedIntWriter extends IHDF5IntReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set an <code>int</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, int value);
+
+    /**
+     * Set an <code>int[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, int[] value);
+
+    /**
+     * Set a multi-dimensional <code>int</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MDIntArray value);
+
+    /**
+     * Set an <code>int[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, int[][] value);
+    
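As a usage illustration of the attribute setters above, a minimal sketch, assuming
HDF5Factory.open(...) and the int32()/uint32() accessors of IHDF5Writer from recent
JHDF5 releases; file name and data set path are made up.

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class UIntAttributeSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("attrs.h5");
            // The referenced object has to exist before attributes can be set on it.
            writer.int32().writeArray("/mydata", new int[] { 1, 2, 3 });
            writer.uint32().setAttr("/mydata", "version", 7);
            writer.uint32().setArrayAttr("/mydata", "histogram", new int[] { 10, 20, 30 });
            writer.uint32().setMatrixAttr("/mydata", "lookup", new int[][] { { 1, 2 }, { 3, 4 } });
            writer.close();
        }
    }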
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out an <code>int</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, int value);
+
+    /**
+     * Writes out an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, int[] data);
+
+    /**
+     * Writes out an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, int[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>int</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates an <code>int</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the int array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *                <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an <code>int</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, int[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of an <code>int</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, int[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, int[] data,
+            int dataSize, long offset);
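For illustration, a minimal sketch of the block-wise 1D pattern documented above:
full blocks go through writeArrayBlock(), the trailing partial block through
writeArrayBlockWithOffset(). HDF5Factory.open(...) and the uint32() accessor of
IHDF5Writer are assumed as in recent JHDF5 releases; names and sizes are made up.

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class UIntBlockWriteSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("blocks.h5");
            // Total size 10, block (chunk) size 4: two full blocks plus a partial one.
            writer.uint32().createArray("/counts", 10, 4);
            writer.uint32().writeArrayBlock("/counts", new int[] { 0, 1, 2, 3 }, 0); // elements 0..3
            writer.uint32().writeArrayBlock("/counts", new int[] { 4, 5, 6, 7 }, 1); // elements 4..7
            // The last block holds only 2 valid elements, so write it with an explicit offset.
            writer.uint32().writeArrayBlockWithOffset("/counts", new int[] { 8, 9 }, 2, 8);
            writer.close();
        }
    }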
+
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, int[][] data);
+
+    /**
+     * Writes out an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, int[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the int matrix to create.
+     * @param sizeY The size of the y dimension of the int matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates an <code>int</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the int matrix to create.
+     * @param sizeY The size of the y dimension of the int matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of an <code>int</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, int[][], long, long)} instead if the total
+     * size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, int[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of an <code>int</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, int[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, int[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of an <code>int</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, int[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, int[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
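A minimal sketch of the matrix-block pattern above for a total size that is not a
multiple of the block size, under the same assumptions (HDF5Factory, uint32()
accessor; names and sizes made up):

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class UIntMatrixBlockSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("matrix.h5");
            // A 5x5 matrix stored in 2x2 blocks: 5 is not a multiple of 2.
            writer.uint32().createMatrix("/m", 5L, 5L, 2, 2);
            // Full blocks can use block numbers ...
            writer.uint32().writeMatrixBlock("/m", new int[][] { { 1, 2 }, { 3, 4 } }, 0, 0);
            // ... the ragged edge (a single 1x1 cell at element offset (4, 4)) needs offsets.
            writer.uint32().writeMatrixBlockWithOffset("/m", new int[][] { { 9 } }, 1, 1, 4L, 4L);
            writer.close();
        }
    }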
+
+    /**
+     * Writes out a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MDIntArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDIntArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>int</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDIntArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>int</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDIntArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
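The five-argument writeMDArrayBlockWithOffset() above is the most general variant:
it carves a sub-block out of a larger in-memory array. A minimal sketch under the
same assumptions (HDF5Factory, uint32() accessor; names and sizes made up):

    import ch.systemsx.cisd.base.mdarray.MDIntArray;
    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class UIntMDBlockSketch
    {
        public static void main(String[] args)
        {
            final IHDF5Writer writer = HDF5Factory.open("md.h5");
            writer.uint32().createMDArray("/cube", new long[] { 4, 4 }, new int[] { 2, 2 });
            // A 3x3 in-memory array of which only the 2x2 corner starting at (1, 1) is written.
            final MDIntArray data = new MDIntArray(new int[] { 3, 3 });
            for (int i = 0; i < 3; ++i)
            {
                for (int j = 0; j < 3; ++j)
                {
                    data.set(i * 3 + j, i, j);
                }
            }
            writer.uint32().writeMDArrayBlockWithOffset("/cube", data,
                    new int[] { 2, 2 },   // blockDimensions: what to take from 'data'
                    new long[] { 0, 0 },  // offset: where it goes in the data set
                    new int[] { 1, 1 });  // memoryOffset: where it starts in 'data'
            writer.close();
        }
    }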
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedLongWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedLongWriter.java
new file mode 100644
index 0000000..b1ceb96
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedLongWriter.java
@@ -0,0 +1,447 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * An interface that provides methods for writing unsigned <code>long</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>
+ * <i>Note:</i> Use the methods in {@link UnsignedIntUtils} to convert from and to unsigned values.
+ * 
+ * @deprecated Use {@link IHDF5LongWriter} instead, as it has all methods of this interface.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5UnsignedLongWriter extends IHDF5LongReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>long</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, long value);
+
+    /**
+     * Set a <code>long[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, long[] value);
+
+    /**
+     * Set a multi-dimensional <code>long</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MDLongArray value);
+
+    /**
+     * Set a <code>long[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, long[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>long</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, long value);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, long[] data);
+
+    /**
+     * Writes out a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, long[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>long</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>long</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the long array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *                <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>long</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, long[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>long</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, long[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, long[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, long[][] data);
+
+    /**
+     * Writes out a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, long[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the long matrix to create.
+     * @param sizeY The size of the y dimension of the long matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>long</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the long matrix to create.
+     * @param sizeY The size of the y dimension of the long matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, long[][], long, long)} instead if the total
+     * size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, long[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, long[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, long[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>long</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, long[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, long[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MDLongArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDLongArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>long</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDLongArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>long</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDLongArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
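Java has no unsigned long, so values above Long.MAX_VALUE travel through this
interface as their two's-complement bit pattern; the UnsignedIntUtils class
mentioned above wraps such conversions. A minimal round-trip sketch, assuming
HDF5Factory and the uint64()/int64() accessors of recent JHDF5 releases:

    import java.math.BigInteger;

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.IHDF5Reader;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class UnsignedLongRoundTrip
    {
        public static void main(String[] args)
        {
            // 2^63 does not fit a signed long; its low 64 bits equal Long.MIN_VALUE.
            final long bits = BigInteger.ONE.shiftLeft(63).longValue();
            final IHDF5Writer writer = HDF5Factory.open("unsigned.h5");
            writer.uint64().write("/counter", bits);
            writer.close();
            final IHDF5Reader reader = HDF5Factory.openForReading("unsigned.h5");
            final long read = reader.int64().read("/counter"); // raw bits in a signed long
            reader.close();
            // Re-interpret the signed bit pattern as the unsigned value: 2^63.
            final BigInteger unsigned = BigInteger.valueOf(read).and(
                    BigInteger.ONE.shiftLeft(64).subtract(BigInteger.ONE));
            System.out.println(unsigned); // prints 9223372036854775808
        }
    }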
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedShortWriter.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedShortWriter.java
new file mode 100644
index 0000000..841c5c5
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5UnsignedShortWriter.java
@@ -0,0 +1,447 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+
+/**
+ * An interface that provides methods for writing unsigned <code>short</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>
+ * <i>Note:</i> Use the methods in {@link UnsignedIntUtils} to convert from and to unsigned values.
+ * 
+ * @deprecated Use {@link IHDF5ShortWriter} instead, as it has all methods of this interface.
+ * 
+ * @author Bernd Rinn
+ */
+@Deprecated
+public interface IHDF5UnsignedShortWriter extends IHDF5ShortReader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>short</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, short value);
+
+    /**
+     * Set a <code>short[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, short[] value);
+
+    /**
+     * Set a multi-dimensional <code>short</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MDShortArray value);
+
+    /**
+     * Set a <code>short[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, short[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>short</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, short value);
+
+    /**
+     * Writes out a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, short[] data);
+
+    /**
+     * Writes out a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, short[] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>short</code> array to create. When <i>requesting</i> a 
+     *            chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5IntStorageFeatures features);
+    
+    /**
+     * Creates a <code>short</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the short array to create. When using extendable data sets 
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data 
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and 
+     *                <code>features</code> is <code>HDF5IntStorageFeatures.INT_NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>short</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, short[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>short</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5IntStorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, short[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, short[] data,
+            int dataSize, long offset);
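+
+    // Illustrative usage sketch (not part of the upstream source): block-wise writing of a
+    // short array, assuming this interface is the one returned by IHDF5Writer#int16().
+    // The file name, data set path and sizes are made up for the example.
+    //
+    //   IHDF5Writer writer = HDF5FactoryProvider.get().open(new File("blocks.h5"));
+    //   writer.int16().createArray("/ds", 95L, 10, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    //   final short[] block = new short[10];
+    //   for (long i = 0; i < 9; ++i)
+    //   {
+    //       writer.int16().writeArrayBlock("/ds", block, i); // blocks 0..8 cover elements 0..89
+    //   }
+    //   // The total size (95) is not a multiple of the block size, so write the tail by offset:
+    //   writer.int16().writeArrayBlockWithOffset("/ds", block, 5, 90L);
+    //   writer.close();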
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, short[][] data);
+
+    /**
+     * Writes out a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, short[][] data, 
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} for the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} for the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} for the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5IntStorageFeatures)} for the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+    		HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the short matrix to create.
+     * @param sizeY The size of the y dimension of the short matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>short</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the short matrix to create.
+     * @param sizeY The size of the y dimension of the short matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, short[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, short[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, short[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>short</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5IntStorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, short[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, short[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
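+
+    // Illustrative usage sketch (hypothetical names and sizes): block-wise writing of a
+    // 25x25 short matrix in 10x10 blocks, using an offset write for the border blocks
+    // because the total size is not a multiple of the block size.
+    //
+    //   writer.int16().createMatrix("/m", 25L, 25L, 10, 10,
+    //           HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+    //   final short[][] block = new short[10][10];
+    //   writer.int16().writeMatrixBlock("/m", block, 0L, 0L); // rows 0..9, cols 0..9
+    //   writer.int16().writeMatrixBlockWithOffset("/m", block, 5, 5, 20L, 20L);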
+
+    /**
+     * Writes out a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MDShortArray data);
+
+    /**
+     * Writes out a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MDShortArray data,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>short</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5IntStorageFeatures#INT_CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g. 
+     *            {@link HDF5IntStorageFeatures#INT_CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5IntStorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5IntStorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply with the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MDShortArray data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>short</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MDShortArray data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
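+
+    // Illustrative usage sketch (hypothetical names; assumes MDShortArray can be
+    // constructed from its dimensions, as in ch.systemsx.cisd.base.mdarray):
+    //
+    //   writer.int16().createMDArray("/cube", new long[] { 20, 20, 20 },
+    //           new int[] { 10, 10, 10 });
+    //   final MDShortArray block = new MDShortArray(new int[] { 10, 10, 10 });
+    //   writer.int16().writeMDArrayBlock("/cube", block, new long[] { 1, 0, 1 });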
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5Writer.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5Writer.java
new file mode 100644
index 0000000..cbff4c8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5Writer.java
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * An interface for writing HDF5 files (HDF5 1.6.x or HDF5 1.8.x).
+ * <p>
+ * The interface focuses on ease of use instead of completeness. As a consequence, not all features
+ * of HDF5 are supported by this interface; however, it covers a large subset.
+ * <p>
+ * The functionality is being made available in two ways:
+ * <ol>
+ * <li>{@link IHDF5SimpleWriter} contains the most important methods in one interface. If you are
+ * new to the library, this is a good starting point, see the example code below.</li>
+ * <li>The hierarchical ("quasi-fluent") API provides the full functionality. It is designed along
+ * the data types supported by JHDF5.
+ * <ul>
+ * <li>{@link #file()}: File-level information and operations, has e.g. the
+ * {@link IHDF5FileLevelReadWriteHandler#close()} and {@link IHDF5FileLevelReadWriteHandler#flush()}
+ * methods.</li>
+ * <li>{@link #object()}: Object-level information, where "objects" can be data sets, links, groups
+ * or data types, following the concept of an HDF5 object. Here you can find methods like
+ * {@link IHDF5ObjectReadWriteInfoProviderHandler#createGroup(String)} for creating a new group, or
+ * {@link IHDF5ObjectReadWriteInfoProviderHandler#createSoftLink(String, String)} for creating a
+ * symbolic link.</li>
+ * <li>{@link #bool()}: Writer methods for boolean data sets, including bit fields.</li>
+ * <li>{@link #int8()} / {@link #int16()} / {@link #int32()} / {@link #int64()}:
+ * Writer methods for signed integer data sets, where the number as part of the method name denotes
+ * the size of the integer type.</li>
+ * <li>{@link #uint8()} / {@link #uint16()} / {@link #uint32()} /
+ * {@link #uint64()}: Writer methods for unsigned integer data sets, where the number as part of the
+ * name sets the size of the integer type. While the data sets take signed integer values due to
+ * Java's lack of unsigned integer types, they <i>represent</i> them as unsigned values in the HDF5
+ * file. See {@link UnsignedIntUtils} for conversion methods, e.g.
+ * <code>uint32().write("myint", UnsignedIntUtils.toInt16(50000))</code> will write a 16-bit
+ * unsigned integer with value 50000.</li>
+ * <li>{@link #float32()} / {@link #float64()}: Writer methods for float data sets, where the number
+ * as part of the name sets the size of the float type.</li>
+ * <li>{@link #time()} / {@link #duration()}: Writer methods for time stamp (or date) and for time
+ * duration data sets.</li>
+ * <li>{@link #string()}: Writer methods for string data sets.</li>
+ * <li>{@link #enumeration()}: Writer methods for enumeration data sets.</li>
+ * <li>{@link #compound()}: Writer methods for compound data sets.</li>
+ * <li>{@link #opaque()}: Writer methods for data sets that are "black boxes" to HDF5 which are
+ * called "opaque data sets" in HDF5 jargon. Here you can also find methods of reading arbitrary
+ * data sets as byte arrays.</li>
+ * <li>{@link #reference()}: Writer methods for HDF5 object references. Note that object references,
+ * though similar to hard links and symbolic links at first glance, are quite different in
+ * HDF5.</li>
+ * </ul>
+ * </li>
+ * </ol>
+ * <p>
+ * Simple usage example:
+ * 
+ * <pre>
+ * float[] f = new float[100];
+ * ...
+ * IHDF5Writer writer = HDF5FactoryProvider.get().open(new File("test.h5"));
+ * writer.writeFloatArray("/some/path/dataset", f);
+ * writer.setStringAttribute("some key", "some value");
+ * writer.close();
+ * </pre>
+ * 
+ * @author Bernd Rinn
+ */
+ at SuppressWarnings("deprecation")
+public interface IHDF5Writer extends IHDF5Reader, IHDF5SimpleWriter, IHDF5LegacyWriter
+{
+    // /////////////////////
+    // File
+    // /////////////////////
+
+    /**
+     * Returns the handler for file-level information and status.
+     */
+    @Override
+    public IHDF5FileLevelReadWriteHandler file();
+
+    // /////////////////////////////////
+    // Objects, links, groups and types
+    // /////////////////////////////////
+
+    /**
+     * Returns an info provider and handler for HDF5 objects like links, groups, data sets and data
+     * types.
+     */
+    @Override
+    public IHDF5ObjectReadWriteInfoProviderHandler object();
+
+    // /////////////////////
+    // Opaque
+    // /////////////////////
+
+    /**
+     * Returns the full writer for opaque values.
+     */
+    @Override
+    public IHDF5OpaqueWriter opaque();
+
+    // /////////////////////
+    // Boolean
+    // /////////////////////
+
+    /**
+     * Returns the full writer for boolean values.
+     */
+    @Override
+    public IHDF5BooleanWriter bool();
+
+    // /////////////////////
+    // Bytes
+    // /////////////////////
+
+    /**
+     * Returns the full writer for byte / int8.
+     */
+    @Override
+    public IHDF5ByteWriter int8();
+
+    /**
+     * Returns the full writer for unsigned byte / uint8.
+     */
+    @Override
+    public IHDF5ByteWriter uint8();
+
+    // /////////////////////
+    // Short
+    // /////////////////////
+
+    /**
+     * Returns the full writer for short / int16.
+     */
+    @Override
+    public IHDF5ShortWriter int16();
+
+    /**
+     * Returns the full writer for unsigned short / uint16.
+     */
+    @Override
+    public IHDF5ShortWriter uint16();
+
+    // /////////////////////
+    // Int
+    // /////////////////////
+
+    /**
+     * Returns the full writer for int / int32.
+     */
+    @Override
+    public IHDF5IntWriter int32();
+
+    /**
+     * Returns the full writer for unsigned int / uint32.
+     */
+    @Override
+    public IHDF5IntWriter uint32();
+
+    // /////////////////////
+    // Long
+    // /////////////////////
+
+    /**
+     * Returns the full writer for long / int64.
+     */
+    @Override
+    public IHDF5LongWriter int64();
+
+    /**
+     * Returns the full writer for unsigned long / uint64.
+     */
+    @Override
+    public IHDF5LongWriter uint64();
+
+    // /////////////////////
+    // Float
+    // /////////////////////
+
+    /**
+     * Returns the full writer for float / float32.
+     */
+    @Override
+    public IHDF5FloatWriter float32();
+
+    // /////////////////////
+    // Double
+    // /////////////////////
+
+    /**
+     * Returns the full writer for double / float64.
+     */
+    @Override
+    public IHDF5DoubleWriter float64();
+
+    // /////////////////////
+    // Enums
+    // /////////////////////
+
+    /**
+     * Returns the full writer for enumerations.
+     */
+    @Override
+    public IHDF5EnumWriter enumeration();
+
+    // /////////////////////
+    // Compounds
+    // /////////////////////
+
+    /**
+     * Returns the full writer for compounds.
+     */
+    @Override
+    public IHDF5CompoundWriter compound();
+
+    // /////////////////////
+    // Strings
+    // /////////////////////
+
+    /**
+     * Returns the full writer for strings.
+     */
+    @Override
+    public IHDF5StringWriter string();
+
+    // /////////////////////
+    // Date & Time
+    // /////////////////////
+
+    /**
+     * Returns the full writer for dates and times.
+     */
+    @Override
+    public IHDF5DateTimeWriter time();
+
+    /**
+     * Returns the full writer for time durations.
+     */
+    @Override
+    public IHDF5TimeDurationWriter duration();
+
+    // /////////////////////
+    // Object references
+    // /////////////////////
+
+    /**
+     * Returns the full writer for object references.
+     */
+    @Override
+    public IHDF5ReferenceWriter reference();
+
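+    // Illustrative sketch of the hierarchical ("quasi-fluent") API described above;
+    // the file and data set names are made up:
+    //
+    //   IHDF5Writer w = HDF5FactoryProvider.get().open(new File("demo.h5"));
+    //   w.object().createGroup("/g");
+    //   w.int32().writeArray("/g/ints", new int[] { 1, 2, 3 });
+    //   w.float64().writeArray("/g/doubles", new double[] { 1.5, 2.5 });
+    //   w.file().close();
+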
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IHDF5WriterConfigurator.java b/source/java/ch/systemsx/cisd/hdf5/IHDF5WriterConfigurator.java
new file mode 100644
index 0000000..1d2371d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IHDF5WriterConfigurator.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+/**
+ * The configuration of the writer is done by chaining calls to configuration methods before calling
+ * {@link #writer()}.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5WriterConfigurator extends IHDF5ReaderConfigurator
+{
+    /**
+     * The mode of synchronizing changes (using a method like <code>fsync(2)</code>) to the HDF5
+     * file with the underlying storage. As <code>fsync(2)</code> is blocking, the synchronization is
+     * by default performed in a separate thread to minimize latency effects on the application. In
+     * order to ensure that <code>fsync(2)</code> is called in the same thread, use one of the
+     * <code>*_BLOCK</code> modes.
+     * <p>
+     * Note that non-blocking modes can have unexpected interactions with mandatory locks on
+     * Windows. The symptom of that will be that the program holds a lock to the HDF5 file for some
+     * (short) time even after the file has been closed. Thus, on Windows by default a blocking mode
+     * is chosen.
+     */
+    public enum SyncMode
+    {
+        /**
+         * Do not synchronize at all.
+         */
+        NO_SYNC,
+        /**
+         * Synchronize whenever {@link IHDF5FileLevelReadWriteHandler#flush()} or
+         * {@link IHDF5FileLevelReadWriteHandler#close()} are called.
+         */
+        SYNC,
+        /**
+         * Synchronize whenever {@link IHDF5FileLevelReadWriteHandler#flush()} or
+         * {@link IHDF5FileLevelReadWriteHandler#close()} are called. Block until the
+         * synchronization is finished.
+         */
+        SYNC_BLOCK,
+        /**
+         * Synchronize whenever {@link IHDF5FileLevelReadWriteHandler#flush()} is called. <i>Default
+         * on Unix</i>.
+         */
+        SYNC_ON_FLUSH,
+        /**
+         * Synchronize whenever {@link IHDF5FileLevelReadWriteHandler#flush()} is called. Block
+         * until the synchronization is finished. <i>Default on Windows</i>.
+         */
+        SYNC_ON_FLUSH_BLOCK,
+    }
+
+    /**
+     * Specify file format compatibility settings.
+     */
+    public enum FileFormat
+    {
+        /**
+         * Enforce compatibility with HDF5 1.6 format.
+         */
+        STRICTLY_1_6,
+
+        /**
+         * Start with HDF5 1.6 format, but allow usage of features which require HDF5 1.8 library to
+         * read. <i>Default</i>.
+         */
+        ALLOW_1_8,
+
+        /**
+         * Enforce compatibility with HDF5 1.8 format.
+         */
+        STRICTLY_1_8;
+
+        /**
+         * Returns <code>true</code> if using HDF5 1.8 features is OK.
+         */
+        boolean isHDF5_1_8_OK()
+        {
+            return ordinal() > STRICTLY_1_6.ordinal();
+        }
+
+    }
+
+    /**
+     * The file will be truncated to length 0 if it already exists, that is, its content will be
+     * deleted.
+     */
+    public IHDF5WriterConfigurator overwrite();
+
+    /**
+     * Use data types which cannot be extended later on. This may reduce the initial size of the
+     * HDF5 file.
+     */
+    public IHDF5WriterConfigurator dontUseExtendableDataTypes();
+
+    /**
+     * Use simple data spaces for attributes.
+     */
+    public IHDF5WriterConfigurator useSimpleDataSpaceForAttributes();
+    
+    /**
+     * On writing a data set, keep the data set if it exists and only write the new data. This is
+     * equivalent to the <code>_KEEP</code> variants of {@link HDF5GenericStorageFeatures} and makes
+     * this behavior the default.
+     * <p>
+     * If this setting is not given, an existing data set will be deleted before the data set is
+     * written.
+     * <p>
+     * <i>Note:</i> If this configuration option is chosen, data types and storage features may only
+     * apply if the written data set does not yet exist. For example, it may lead to a string value
+     * being truncated on write if a string dataset with the same name and shorter length already
+     * exists.
+     */
+    public IHDF5WriterConfigurator keepDataSetsIfTheyExist();
+
+    /**
+     * Sets the file format compatibility for the writer.
+     */
+    public IHDF5WriterConfigurator fileFormat(FileFormat newFileFormat);
+
+    /**
+     * Sets the {@link SyncMode}.
+     */
+    public IHDF5WriterConfigurator syncMode(SyncMode newSyncMode);
+
+    /**
+     * Will try to perform numeric conversions where appropriate if supported by the platform.
+     * <p>
+     * <strong>Numeric conversions can be platform dependent and are not available on all platforms.
+     * Be advised not to rely on numeric conversions if you can help it!</strong>
+     */
+    @Override
+    public IHDF5WriterConfigurator performNumericConversions();
+
+    /**
+     * Sets UTF8 character encoding for all paths and all strings in this file. (The default is
+     * ASCII.)
+     */
+    @Override
+    public IHDF5WriterConfigurator useUTF8CharacterEncoding();
+
+    /**
+     * Switches off automatic dereferencing of unresolved references. Use this when you need to
+     * access file names that start with \0. The down-side of switching off automatic dereferencing
+     * is that you can't provide references as obtained by
+     * {@link IHDF5ReferenceReader#read(String, boolean)} with <code>resolveName=false</code> in
+     * places where a dataset path is required. <br>
+     * <i>Note: automatic dereferencing is switched on by default.</i>
+     */
+    @Override
+    public IHDF5WriterConfigurator noAutoDereference();
+
+    /**
+     * Sets the suffix that is used to mark and recognize housekeeping files and groups. An empty
+     * string ("") denotes the default, which is two leading and two trailing underscores
+     * ("__NAME__").
+     */
+    public IHDF5WriterConfigurator houseKeepingNameSuffix(String houseKeepingNameSuffix);
+
+    /**
+     * Returns an {@link IHDF5Writer} based on this configuration.
+     */
+    public IHDF5Writer writer();
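+
+    // Illustrative configuration chain (a sketch, assuming the configurator is obtained
+    // from the factory, e.g. via HDF5FactoryProvider.get().configure(new File("cfg.h5"))):
+    //
+    //   IHDF5Writer w = HDF5FactoryProvider.get().configure(new File("cfg.h5"))
+    //           .dontUseExtendableDataTypes()
+    //           .syncMode(SyncMode.SYNC_ON_FLUSH_BLOCK)
+    //           .writer();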
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/IndexMap.java b/source/java/ch/systemsx/cisd/hdf5/IndexMap.java
new file mode 100644
index 0000000..855fb20
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/IndexMap.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.HashMap;
+
+/**
+ * A map for storing index-to-index-value mappings.
+ *
+ * @author Bernd Rinn
+ */
+public class IndexMap extends HashMap<Integer, Long>
+{
+    private static final long serialVersionUID = 1L;
+    
+    /**
+     * Bind <code>index</code> to <code>indexValue</code>.
+     * 
+     * @return The map itself (for chained calls).
+     */
+    public IndexMap bind(int index, long indexValue)
+    {
+        put(index, indexValue);
+        return this;
+    }
+
+    /**
+     * Bind <code>index</code> to <code>indexValue</code>.
+     * 
+     * @return The map itself (for chained calls).
+     */
+    public IndexMap bind(int index, int indexValue)
+    {
+        put(index, (long) indexValue);
+        return this;
+    }
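+
+    // Illustrative usage sketch: chained binding of indices to index values, e.g. to fix
+    // dimensions 0 and 2 when addressing a block of a multi-dimensional data set:
+    //
+    //   final IndexMap bound = new IndexMap().bind(0, 10L).bind(2, 5);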
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/MatrixUtils.java b/source/java/ch/systemsx/cisd/hdf5/MatrixUtils.java
new file mode 100644
index 0000000..8113e29
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/MatrixUtils.java
@@ -0,0 +1,467 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.Array;
+import java.util.Arrays;
+import java.util.Map;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MDAbstractArray;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+
+/**
+ * Utilities for working with primitive matrices.
+ * <p>
+ * <i>This is an internal API that should not be expected to be stable between releases!</i>
+ * 
+ * @author Bernd Rinn
+ */
+final class MatrixUtils
+{
+
+    private MatrixUtils()
+    {
+        // Cannot be instantiated
+    }
+
+    static void checkMDArrayDimensions(final String name, final int[] dimensions,
+            final MDAbstractArray<?> array)
+    {
+        if (Arrays.equals(dimensions, array.dimensions()) == false)
+        {
+            throw new IllegalArgumentException("The member '" + name + "' has dimensions "
+                    + Arrays.toString(array.dimensions()) + " but is supposed to have dimensions "
+                    + Arrays.toString(dimensions) + ".");
+        }
+    }
+
+    static void checkMatrixDimensions(final String name, final int[] dimensions, final Object matrix)
+    {
+        final int dimX = Array.getLength(matrix);
+        final int dimY = Array.getLength(Array.get(matrix, 0));
+        if (dimensions.length != 2 || dimensions[0] != dimX || dimensions[1] != dimY)
+        {
+            throw new IllegalArgumentException("The member '" + name + "' has dimensions [" + dimX
+                    + "," + dimY + "]." + " but is supposed to have dimensions "
+                    + Arrays.toString(dimensions) + ".");
+        }
+    }
+
+    static float[] flatten(float[][] matrix)
+    {
+        if (matrix.length == 0)
+        {
+            throw new IllegalArgumentException("Matrix must not have a length of 0.");
+        }
+        final int dimY = matrix.length;
+        final int dimX = matrix[0].length;
+        for (int i = 1; i < dimY; ++i)
+        {
+            if (matrix[i].length != dimX)
+            {
+                throw new IllegalArgumentException(
+                        "All rows in matrix need to have the same number of columns.");
+            }
+        }
+        final float[] result = new float[dimX * dimY];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrix[i], 0, result, i * dimX, dimX);
+        }
+        return result;
+    }
+
+    static float[][] shapen(float[] matrixData, int[] dims)
+    {
+        final int dimY = dims[0];
+        final int dimX = dims[1];
+        final float[][] result = new float[dimY][dimX];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrixData, i * dimX, result[i], 0, dimX);
+        }
+        return result;
+    }
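+
+    // Illustrative round trip: flatten() stores a matrix row by row and shapen()
+    // restores it, given the dimensions as { rows, columns }:
+    //
+    //   final float[][] m = new float[][] { { 1f, 2f, 3f }, { 4f, 5f, 6f } };
+    //   final float[] flat = flatten(m);                         // { 1, 2, 3, 4, 5, 6 }
+    //   final float[][] back = shapen(flat, new int[] { 2, 3 }); // equals m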
+
+    static double[] flatten(double[][] matrix)
+    {
+        if (matrix.length == 0)
+        {
+            throw new IllegalArgumentException("Matrix must not have a length of 0.");
+        }
+        final int dimY = matrix.length;
+        final int dimX = matrix[0].length;
+        for (int i = 1; i < dimY; ++i)
+        {
+            if (matrix[i].length != dimX)
+            {
+                throw new IllegalArgumentException(
+                        "All rows in matrix need to have the same number of columns.");
+            }
+        }
+        final double[] result = new double[dimX * dimY];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrix[i], 0, result, i * dimX, dimX);
+        }
+        return result;
+    }
+
+    static double[][] shapen(double[] matrixData, int[] dims)
+    {
+        final int dimY = dims[0];
+        final int dimX = dims[1];
+        final double[][] result = new double[dimY][dimX];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrixData, i * dimX, result[i], 0, dimX);
+        }
+        return result;
+    }
+
+    static int[] flatten(int[][] matrix)
+    {
+        if (matrix.length == 0)
+        {
+            throw new IllegalArgumentException("Matrix must not have a length of 0.");
+        }
+        final int dimY = matrix.length;
+        final int dimX = matrix[0].length;
+        for (int i = 1; i < dimY; ++i)
+        {
+            if (matrix[i].length != dimX)
+            {
+                throw new IllegalArgumentException(
+                        "All rows in matrix need to have the same number of columns.");
+            }
+        }
+        final int[] result = new int[dimX * dimY];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrix[i], 0, result, i * dimX, dimX);
+        }
+        return result;
+    }
+
+    static int[][] shapen(int[] matrixData, int[] dims)
+    {
+        final int dimY = dims[0];
+        final int dimX = dims[1];
+        final int[][] result = new int[dimY][dimX];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrixData, i * dimX, result[i], 0, dimX);
+        }
+        return result;
+    }
+
+    static long[] flatten(long[][] matrix)
+    {
+        if (matrix.length == 0)
+        {
+            throw new IllegalArgumentException("Matrix must not have a length of 0.");
+        }
+        final int dimY = matrix.length;
+        final int dimX = matrix[0].length;
+        for (int i = 1; i < dimY; ++i)
+        {
+            if (matrix[i].length != dimX)
+            {
+                throw new IllegalArgumentException(
+                        "All rows in matrix need to have the same number of columns.");
+            }
+        }
+        final long[] result = new long[dimX * dimY];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrix[i], 0, result, i * dimX, dimX);
+        }
+        return result;
+    }
+
+    static long[][] shapen(long[] matrixData, int[] dims)
+    {
+        final int dimY = dims[0];
+        final int dimX = dims[1];
+        final long[][] result = new long[dimY][dimX];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrixData, i * dimX, result[i], 0, dimX);
+        }
+        return result;
+    }
+
+    static short[] flatten(short[][] matrix)
+    {
+        if (matrix.length == 0)
+        {
+            throw new IllegalArgumentException("Matrix must not have a length of 0.");
+        }
+        final int dimY = matrix.length;
+        final int dimX = matrix[0].length;
+        for (int i = 1; i < dimY; ++i)
+        {
+            if (matrix[i].length != dimX)
+            {
+                throw new IllegalArgumentException(
+                        "All rows in matrix need to have the same number of columns.");
+            }
+        }
+        final short[] result = new short[dimX * dimY];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrix[i], 0, result, i * dimX, dimX);
+        }
+        return result;
+    }
+
+    static short[][] shapen(short[] matrixData, int[] dims)
+    {
+        final int dimY = dims[0];
+        final int dimX = dims[1];
+        final short[][] result = new short[dimY][dimX];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrixData, i * dimX, result[i], 0, dimX);
+        }
+        return result;
+    }
+
+    static byte[] flatten(byte[][] matrix)
+    {
+        if (matrix.length == 0)
+        {
+            throw new IllegalArgumentException("Matrix must not have a length of 0.");
+        }
+        final int dimY = matrix.length;
+        final int dimX = matrix[0].length;
+        for (int i = 1; i < dimY; ++i)
+        {
+            if (matrix[i].length != dimX)
+            {
+                throw new IllegalArgumentException(
+                        "All rows in matrix need to have the same number of columns.");
+            }
+        }
+        final byte[] result = new byte[dimX * dimY];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrix[i], 0, result, i * dimX, dimX);
+        }
+        return result;
+    }
+
+    static byte[][] shapen(byte[] matrixData, int[] dims)
+    {
+        final int dimY = dims[0];
+        final int dimX = dims[1];
+        final byte[][] result = new byte[dimY][dimX];
+        for (int i = 0; i < dimY; ++i)
+        {
+            System.arraycopy(matrixData, i * dimX, result[i], 0, dimX);
+        }
+        return result;
+    }
+
+    static boolean incrementIdx(int[] idx, int[] dims, int[] offset)
+    {
+        int d = idx.length - 1;
+        while (++idx[d] >= offset[d] + dims[d])
+        {
+            idx[d] = offset[d];
+            if (d == 0)
+            {
+                return false;
+            } else
+            {
+                --d;
+            }
+        }
+        return true;
+    }
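+
+    // Illustrative usage sketch: incrementIdx() advances a multi-dimensional index
+    // odometer-style within [offset, offset + dims) and returns false once the last
+    // index has been visited, so it pairs naturally with a do/while loop:
+    //
+    //   final int[] idx = offset.clone();
+    //   do
+    //   {
+    //       // ... visit idx ...
+    //   } while (incrementIdx(idx, dims, offset));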
+
+    static int[] concat(int[] array1, int[] array2)
+    {
+        if (array1.length == 0)
+        {
+            return array2;
+        }
+        if (array2.length == 0)
+        {
+            return array1;
+        }
+        final int[] result = new int[array1.length + array2.length];
+        System.arraycopy(array1, 0, result, 0, array1.length);
+        System.arraycopy(array2, 0, result, array1.length, array2.length);
+        return result;
+    }
+
+    static long[] concat(long[] array1, int[] array2)
+    {
+        if (array1.length == 0)
+        {
+            return MDArray.toLong(array2);
+        }
+        if (array2.length == 0)
+        {
+            return array1;
+        }
+        final long[] result = new long[array1.length + array2.length];
+        System.arraycopy(array1, 0, result, 0, array1.length);
+        for (int i = 0; i < array2.length; ++i)
+        {
+            result[array1.length + i] = array2[i];
+        }
+        return result;
+    }
+
+    static int cardinalityBoundIndices(Map<?, ?> boundIndices)
+    {
+        return boundIndices.size();
+    }
+
+    static int cardinalityBoundIndices(long[] boundIndices)
+    {
+        int card = 0;
+        for (int i = 0; i < boundIndices.length; ++i)
+        {
+            if (boundIndices[i] >= 0)
+            {
+                ++card;
+            }
+        }
+        return card;
+    }
+
+    static void createFullBlockDimensionsAndOffset(int[] blockDimensions, long[] offsetOrNull,
+            Map<Integer, Long> boundIndices, final long[] fullDimensions,
+            final int[] fullBlockDimensions, final long[] fullOffset)
+    {
+        createFullBlockDimensionsAndOffset(blockDimensions, offsetOrNull, boundIndices,
+                fullDimensions.length, fullDimensions, fullBlockDimensions, fullOffset);
+    }
+
+    static void createFullBlockDimensionsAndOffset(int[] blockDimensions, long[] offsetOrNull,
+            Map<Integer, Long> boundIndices, final int fullRank, final int[] fullBlockDimensions,
+            final long[] fullOffset)
+    {
+        createFullBlockDimensionsAndOffset(blockDimensions, offsetOrNull, boundIndices, fullRank,
+                null, fullBlockDimensions, fullOffset);
+    }
+
+    static void createFullBlockDimensionsAndOffset(int[] blockDimensions, long[] offsetOrNull,
+            Map<Integer, Long> boundIndices, final int fullRank, final long[] fullDimensionsOrNull,
+            final int[] fullBlockDimensions, final long[] fullOffset)
+    {
+        int j = 0;
+        for (int i = 0; i < fullRank; ++i)
+        {
+            final Long boundIndexOrNull = boundIndices.get(i);
+            if (boundIndexOrNull == null)
+            {
+                if (blockDimensions[j] < 0 && fullDimensionsOrNull != null)
+                {
+                    blockDimensions[j] = (int) fullDimensionsOrNull[i];
+                }
+                fullBlockDimensions[i] = blockDimensions[j];
+                fullOffset[i] = (offsetOrNull == null) ? 0 : offsetOrNull[j];
+                ++j;
+            } else
+            {
+                fullBlockDimensions[i] = 1;
+                fullOffset[i] = boundIndexOrNull;
+            }
+        }
+    }
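+
+    // Worked example (illustrative): for a rank-3 data set with index 1 bound to 5
+    // (boundIndices = new IndexMap().bind(1, 5L)), blockDimensions { 4, 4 } and a null
+    // offset yield fullBlockDimensions { 4, 1, 4 } and fullOffset { 0, 5, 0 }: each
+    // bound dimension collapses to extent 1 at its bound index value.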
+
+    static void createFullBlockDimensionsAndOffset(int[] blockDimensions, long[] offsetOrNull,
+            long[] boundIndices, final long[] fullDimensions, final int[] fullBlockDimensions,
+            final long[] fullOffset)
+    {
+        createFullBlockDimensionsAndOffset(blockDimensions, offsetOrNull, boundIndices,
+                fullDimensions.length, fullDimensions, fullBlockDimensions, fullOffset);
+    }
+
+    static void createFullBlockDimensionsAndOffset(int[] blockDimensions, long[] offsetOrNull,
+            long[] boundIndices, final int fullRank, final int[] fullBlockDimensions,
+            final long[] fullOffset)
+    {
+        createFullBlockDimensionsAndOffset(blockDimensions, offsetOrNull, boundIndices, fullRank,
+                null, fullBlockDimensions, fullOffset);
+    }
+
+    static void createFullBlockDimensionsAndOffset(int[] blockDimensions, long[] offsetOrNull,
+            long[] boundIndices, final int fullRank, final long[] fullDimensionsOrNull,
+            final int[] fullBlockDimensions, final long[] fullOffset)
+    {
+        int j = 0;
+        for (int i = 0; i < fullRank; ++i)
+        {
+            final long boundIndex = boundIndices[i];
+            if (boundIndex < 0)
+            {
+                if (blockDimensions[j] < 0 && fullDimensionsOrNull != null)
+                {
+                    blockDimensions[j] = (int) fullDimensionsOrNull[i];
+                }
+                fullBlockDimensions[i] = blockDimensions[j];
+                fullOffset[i] = (offsetOrNull == null) ? 0 : offsetOrNull[j];
+                ++j;
+            } else
+            {
+                fullBlockDimensions[i] = 1;
+                fullOffset[i] = boundIndex;
+            }
+        }
+    }
+
+    static void checkBoundIndices(String objectPath, long[] dimensions, int cardBoundIndices)
+            throws HDF5JavaException
+    {
+        if (cardBoundIndices > dimensions.length)
+        {
+            throw new HDF5JavaException("Dataset " + objectPath + ": more bound indices (#"
+                    + cardBoundIndices + ") than dataset dimensions (#" + dimensions.length + ")");
+        }
+    }
+
+    static void checkBoundIndices(String objectPath, long[] dimensions, long[] boundIndices)
+            throws HDF5JavaException
+    {
+        if (dimensions.length != boundIndices.length)
+        {
+            throw new HDF5JavaException("Dataset " + objectPath + ": boundIndices array (#"
+                    + boundIndices.length + ") differs from dataset dimensions (#"
+                    + dimensions.length + ")");
+        }
+    }
+
+    static void checkBoundIndices(String objectPath, long[] dimensions, int[] blockDimensions,
+            int cardBoundIndices) throws HDF5JavaException
+    {
+        if (dimensions.length != blockDimensions.length + cardBoundIndices)
+        {
+            throw new HDF5JavaException("Dataset " + objectPath
+                    + ": cardinality of bound indices (#" + cardBoundIndices
+                    + ") plus rank of blocks (#" + blockDimensions.length
+                    + ") not equal to rank of dataset (#" + dimensions.length + ")");
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/PaddingUtils.java b/source/java/ch/systemsx/cisd/hdf5/PaddingUtils.java
new file mode 100644
index 0000000..c942e79
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/PaddingUtils.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A class with methods for padding of memory structures.
+ * <p>
+ * <i>This is an internal API that should not be expected to be stable between releases!</i>
+ * 
+ * @author Bernd Rinn
+ */
+class PaddingUtils
+{
+    private static final int machineWordSize = HDFNativeData.getMachineWordSize(); 
+    
+    private PaddingUtils()
+    {
+        // Cannot be instantiated
+    }
+
+    /**
+     * Compute the padded <code>offset</code> to have aligned access to variables of
+     * <code>elementSize</code>, or the size of the machine word, whichever is smaller.
+     */
+    static int padOffset(int offset, int elementSize)
+    {
+        if (elementSize > 0)
+        {
+            final int actualElementSize = Math.min(elementSize, machineWordSize);
+            int mod = offset % actualElementSize;
+            return (mod > 0) ? offset + actualElementSize - mod : offset;
+        } else
+        {
+            return offset;
+        }
+    }
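+
+    // Worked examples (illustrative), assuming a machine word size of at least 4 bytes:
+    //
+    //   padOffset(5, 4) == 8 // 5 is rounded up to the next multiple of 4
+    //   padOffset(8, 4) == 8 // already aligned
+    //   padOffset(7, 0) == 7 // elementSize <= 0 leaves the offset unchanged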
+
+    /**
+     * Compute the maximal element size (in bytes). If the maximal element size is larger than the
+     * size of a machine word on this platform, return the size of a machine word instead.
+     */
+    static int findMaxElementSize(HDF5MemberByteifyer[] byteifyers)
+    {
+        int maxElementSize = 0;
+        for (HDF5MemberByteifyer b : byteifyers)
+        {
+            maxElementSize = Math.max(maxElementSize, b.getElementSize());
+            if (maxElementSize >= machineWordSize)
+            {
+                return machineWordSize;
+            }
+        }
+        return maxElementSize;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/ReflectionUtils.java b/source/java/ch/systemsx/cisd/hdf5/ReflectionUtils.java
new file mode 100644
index 0000000..307d443
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/ReflectionUtils.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for reflection, used for inferring the mapping between a compound data type and
+ * the fields of a Java class.
+ * <p>
+ * <i>This is an internal API that should not be expected to be stable between releases!</i>
+ * 
+ * @author Bernd Rinn
+ */
+public final class ReflectionUtils
+{
+
+    private ReflectionUtils()
+    {
+        // Cannot be instantiated
+    }
+
+    /**
+     * Returns a map from field names to fields for all fields in the given <var>clazz</var>.
+     */
+    public static Map<String, Field> getFieldMap(final Class<?> clazz)
+    {
+        return getFieldMap(clazz, true);
+    }
+
+    /**
+     * Returns a map from field names to fields for all fields in the given <var>clazz</var>.
+     * 
+     * @param clazz The clazz to get the fields from.
+     * @param excludeNonMappedFields If <code>true</code>, do not add fields to the map which
+     *            are not supposed to be mapped to HDF5 members.
+     */
+    public static Map<String, Field> getFieldMap(final Class<?> clazz,
+            boolean excludeNonMappedFields)
+    {
+        final Map<String, Field> map = new HashMap<String, Field>();
+        final CompoundType ct = clazz.getAnnotation(CompoundType.class);
+        final boolean includeAllFields =
+                excludeNonMappedFields ? ((ct != null) ? ct.mapAllFields() : true) : true;
+        for (Class<?> c = clazz; c != null; c = c.getSuperclass())
+        {
+            for (Field f : c.getDeclaredFields())
+            {
+                final CompoundElement e = f.getAnnotation(CompoundElement.class);
+                if (e != null && org.apache.commons.lang.StringUtils.isNotEmpty(e.memberName()))
+                {
+                    map.put(e.memberName(), f);
+                } else if (e != null || includeAllFields)
+                {
+                    map.put(f.getName(), f);
+                }
+            }
+        }
+        return map;
+    }
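+
+    // Illustrative sketch (hypothetical class): with mapAllFields = false, only fields
+    // carrying a @CompoundElement annotation end up in the map, keyed by memberName
+    // when one is given:
+    //
+    //   @CompoundType(mapAllFields = false)
+    //   static class Point
+    //   {
+    //       @CompoundElement(memberName = "x")
+    //       int xCoord;  // mapped under key "x"
+    //
+    //       int scratch; // not annotated, hence not mapped
+    //   }
+    //
+    //   final Map<String, Field> m = ReflectionUtils.getFieldMap(Point.class); // { "x" -> xCoord }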
+
+    /**
+     * Ensures that the given <var>member</var> is accessible even if by definition it is not.
+     */
+    public static void ensureAccessible(final AccessibleObject memberOrNull)
+    {
+        if (memberOrNull != null && memberOrNull.isAccessible() == false)
+        {
+            memberOrNull.setAccessible(true);
+        }
+    }
+
+    /**
+     * Returns the default constructor of <var>clazz</var>, making it accessible if
+     * necessary.
+     */
+    public static <T> Constructor<T> getDefaultConstructor(final Class<T> clazz)
+            throws SecurityException, NoSuchMethodException, IllegalArgumentException,
+            InstantiationException, IllegalAccessException, InvocationTargetException
+    {
+        final Constructor<T> defaultConstructor = clazz.getDeclaredConstructor();
+        ensureAccessible(defaultConstructor);
+        return defaultConstructor;
+
+    }
+
+    /**
+     * Creates an object of <var>clazz</var> using the default constructor, making the default
+     * constructor accessible if necessary.
+     */
+    public static <T> T newInstance(final Class<T> clazz) throws SecurityException,
+            NoSuchMethodException, IllegalArgumentException, InstantiationException,
+            IllegalAccessException, InvocationTargetException
+    {
+        final Constructor<T> defaultConstructor = clazz.getDeclaredConstructor();
+        ensureAccessible(defaultConstructor);
+        return defaultConstructor.newInstance();
+
+    }
+
+    /**
+     * Returns the enum options of the given <var>enumClass</var>. If <var>enumClass</var> is not an
+     * enum class, return an empty array.
+     */
+    public static String[] getEnumOptions(Class<? extends Enum<?>> enumClass)
+    {
+        final Enum<?>[] constants = enumClass.getEnumConstants();
+        if (constants == null)
+        {
+            return new String[0];
+        }
+        final String[] options = new String[constants.length];
+        for (int i = 0; i < options.length; ++i)
+        {
+            options[i] = constants[i].name();
+        }
+        return options;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/StringUtils.java b/source/java/ch/systemsx/cisd/hdf5/StringUtils.java
new file mode 100644
index 0000000..f7b4949
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/StringUtils.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.UnsupportedEncodingException;
+
+/**
+ * Some auxiliary methods for String to Byte conversion.
+ * <p>
+ * <i>This is an internal API that should not be expected to be stable between releases!</i>
+ * 
+ * @author Bernd Rinn
+ */
+public final class StringUtils
+{
+    private StringUtils()
+    {
+        // Not to be instantiated.
+    }
+
+    /**
+     * Converts string <var>s</var> to a byte array of a 0-terminated string, using
+     * <var>encoding</var> and cutting it to <var>maxCharacters</var> characters if necessary.
+     */
+    public static byte[] toBytes0Term(String s, int maxCharacters, CharacterEncoding encoding)
+    {
+        try
+        {
+            return (cut(s, maxCharacters) + '\0').getBytes(encoding.getCharSetName());
+        } catch (UnsupportedEncodingException ex)
+        {
+            return (cut(s, maxCharacters) + '\0').getBytes();
+        }
+    }
+
+    /**
+     * Converts string <var>s</var> to a byte array of a 0-terminated string, using
+     * <var>encoding</var>.
+     */
+    public static byte[] toBytes0Term(String s, CharacterEncoding encoding)
+    {
+        try
+        {
+            return (s + '\0').getBytes(encoding.getCharSetName());
+        } catch (UnsupportedEncodingException ex)
+        {
+            return (s + '\0').getBytes();
+        }
+    }
+
+    /**
+     * Converts string <var>s</var> to a byte array of a string, using <var>encoding</var> and
+     * cutting it to <var>maxLength</var> characters.
+     */
+    static byte[] toBytes(String s, int maxLength, CharacterEncoding encoding)
+    {
+        try
+        {
+            return (cut(s, maxLength)).getBytes(encoding.getCharSetName());
+        } catch (UnsupportedEncodingException ex)
+        {
+            return (cut(s, maxLength)).getBytes();
+        }
+    }
+
+    /**
+     * Converts string <var>s</var> to a byte array of a string, using <var>encoding</var>.
+     */
+    static byte[] toBytes(String s, CharacterEncoding encoding)
+    {
+        try
+        {
+            return s.getBytes(encoding.getCharSetName());
+        } catch (UnsupportedEncodingException ex)
+        {
+            return s.getBytes();
+        }
+    }
+
+    /**
+     * Converts string array <var>in</var> to a byte array, using <var>encoding</var> and
+     * cutting each element to <var>maxLength</var> characters if necessary.
+     */
+    static byte[] toBytes(final String[] in, final int maxLength,
+            final CharacterEncoding encoding)
+    {
+        final int nelems = in.length;
+        final int realMaxLength = encoding.getMaxBytesPerChar() * maxLength;
+        final byte[] out = new byte[nelems * realMaxLength];
+
+        for (int i = 0; i < nelems; i++)
+        {
+            final byte[] bytes = toBytes(in[i], maxLength, encoding);
+            System.arraycopy(bytes, 0, out, i * realMaxLength, bytes.length);
+        }
+        return out;
+    }
+
+    /**
+     * Converts string array <var>in</var> to a byte array of 0-terminated strings, using
+     * <var>encoding</var> and cutting each element to <var>maxLength</var> characters if necessary.
+     */
+    static byte[] toBytes0Term(final String[] in, final int maxLength,
+            final CharacterEncoding encoding)
+    {
+        final int nelems = in.length;
+        final int realMaxLength = encoding.getMaxBytesPerChar() * maxLength + 1;
+        final byte[] out = new byte[nelems * realMaxLength];
+
+        for (int i = 0; i < nelems; i++)
+        {
+            final byte[] bytes = toBytes0Term(in[i], maxLength, encoding);
+            System.arraycopy(bytes, 0, out, i * realMaxLength, bytes.length);
+        }
+        return out;
+    }
+
+    /**
+     * Converts byte array <var>data</var> containing a 0-terminated string using
+     * <var>encoding</var> to a string.
+     */
+    static String fromBytes0Term(byte[] data, CharacterEncoding encoding)
+    {
+        return fromBytes0Term(data, 0, data.length, encoding);
+    }
+
+    /**
+     * Converts byte array <var>data</var> containing a 0-terminated string at <var>startIdx</var>
+     * using <var>encoding</var> to a string. Does not search beyond <var>maxEndIdx</var>.
+     */
+    static String fromBytes0Term(byte[] data, int startIdx, int maxEndIdx,
+            CharacterEncoding encoding)
+    {
+        int termIdx;
+        for (termIdx = startIdx; termIdx < maxEndIdx && data[termIdx] != 0; ++termIdx)
+        {
+        }
+        try
+        {
+            return new String(data, startIdx, termIdx - startIdx, encoding.getCharSetName());
+        } catch (UnsupportedEncodingException ex)
+        {
+            return new String(data, startIdx, termIdx - startIdx);
+        }
+    }
+
+    /**
+     * Converts the first <var>length</var> bytes of byte array <var>data</var> containing a string
+     * using <var>encoding</var> to a string.
+     */
+    static String fromBytes(byte[] data, int length, CharacterEncoding encoding)
+    {
+        return fromBytes(data, 0, length, encoding);
+    }
+
+    /**
+     * Converts byte array <var>data</var> containing a string using <var>encoding</var> to a
+     * string.
+     */
+    static String fromBytes(byte[] data, CharacterEncoding encoding)
+    {
+        return fromBytes(data, 0, data.length, encoding);
+    }
+
+    /**
+     * Converts byte array <var>data</var> containing a string from <var>startIdx</var> to
+     * <var>endIdx</var> using <var>encoding</var> to a string.
+     */
+    static String fromBytes(byte[] data, int startIdx, int endIdx, CharacterEncoding encoding)
+    {
+        try
+        {
+            return new String(data, startIdx, endIdx - startIdx, encoding.getCharSetName());
+        } catch (UnsupportedEncodingException ex)
+        {
+            return new String(data, startIdx, endIdx - startIdx);
+        }
+    }
+
+    private static String cut(String s, int maxLength)
+    {
+        if (s.length() > maxLength)
+        {
+            return s.substring(0, maxLength);
+        } else
+        {
+            return s;
+        }
+    }
+
+    /**
+     * Cuts or pads <var>value</var> to <var>length</var>.
+     */
+    static byte[] cutOrPadBytes(byte[] value, int length)
+    {
+        if (value.length == length)
+        {
+            return value;
+        } else
+        {
+            final byte[] newValue = new byte[length];
+            System.arraycopy(value, 0, newValue, 0, Math.min(value.length, length));
+            return newValue;
+        }
+    }
+
+}
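
A round-trip sketch for the 0-terminated, fixed-width string layout implemented above. It assumes same-package access (several of the helpers are package-private) and that the CharacterEncoding enum, defined elsewhere in this package, has a UTF8 constant:

    // Encode with a limit of 8 characters; the terminating 0 byte is appended.
    byte[] bytes = StringUtils.toBytes0Term("hello", 8, CharacterEncoding.UTF8);

    // Decoding stops at the first 0 byte, so the round trip is lossless
    // as long as the string fits into the character limit.
    String back = StringUtils.fromBytes0Term(bytes, CharacterEncoding.UTF8);
    assert "hello".equals(back);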
diff --git a/source/java/ch/systemsx/cisd/hdf5/UnsignedIntUtils.java b/source/java/ch/systemsx/cisd/hdf5/UnsignedIntUtils.java
new file mode 100644
index 0000000..0114a39
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/UnsignedIntUtils.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.math.BigInteger;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+
+/**
+ * Utilities for converting signed integers to unsigned integers and vice versa.
+ * 
+ * @author Bernd Rinn
+ */
+public final class UnsignedIntUtils
+{
+    private final static short MAX_UINT_8_P1 = 256;
+
+    private final static short MAX_UINT_8 = MAX_UINT_8_P1 - 1;
+
+    private final static int MAX_UINT_16_P1 = 65536;
+
+    private final static int MAX_UINT_16 = MAX_UINT_16_P1 - 1;
+
+    private final static long MAX_UINT_32_P1 = 4294967296L;
+
+    private final static long MAX_UINT_32 = MAX_UINT_32_P1 - 1;
+
+    private final static BigInteger MAX_UINT_64_P1 = new BigInteger("2").pow(64);
+
+    private final static BigInteger MAX_UINT_64 = MAX_UINT_64_P1.subtract(BigInteger.ONE);
+
+    /**
+     * Converts <var>value</var> to <code>int8</code>.
+     * 
+     * @throws IllegalArgumentException if <var>value</var> is either negative or too large to fit
+     *             into <code>uint8</code>.
+     */
+    public static byte toInt8(int value) throws IllegalArgumentException
+    {
+        if (value < 0 || value > MAX_UINT_8)
+        {
+            throw new IllegalArgumentException("Value " + Integer.toString(value)
+                    + " cannot be converted to uint8.");
+        }
+        return (byte) value;
+    }
+
+    /**
+     * Converts <var>value</var> to <code>int16</code>.
+     * 
+     * @throws IllegalArgumentException if <var>value</var> is either negative or too large to fit
+     *             into <code>uint16</code>.
+     */
+    public static short toInt16(int value) throws IllegalArgumentException
+    {
+        if (value < 0 || value > MAX_UINT_16)
+        {
+            throw new IllegalArgumentException("Value " + Integer.toString(value)
+                    + " cannot be converted to uint16.");
+        }
+        return (short) value;
+    }
+
+    /**
+     * Converts <var>value</var> to <code>int32</code>.
+     * 
+     * @throws IllegalArgumentException if <var>value</var> is either negative or too large to fit
+     *             into <code>uint32</code>.
+     */
+    public static int toInt32(long value) throws IllegalArgumentException
+    {
+        if (value < 0 || value > MAX_UINT_32)
+        {
+            throw new IllegalArgumentException("Value " + Long.toString(value)
+                    + " cannot be converted to uint32.");
+        }
+        return (int) value;
+    }
+
+    /**
+     * Converts <var>value</var> to <code>int64</code>.
+     * 
+     * @throws IllegalArgumentException if <var>value</var> is either negative or too large to fit
+     *             into <code>uint64</code>.
+     */
+    public static long toInt64(BigInteger value) throws IllegalArgumentException
+    {
+        if (value.compareTo(BigInteger.ZERO) < 0 || value.compareTo(MAX_UINT_64) > 0)
+        {
+            throw new IllegalArgumentException("Value " + value.toString()
+                    + " cannot be converted to uint64.");
+        }
+        return value.longValue();
+    }
+
+    /**
+     * Converts <var>value</var> to <code>uint8</code>.
+     */
+    public static short toUint8(byte value)
+    {
+        return (short) (value < 0 ? MAX_UINT_8_P1 + value : value);
+    }
+
+    /**
+     * Converts <var>value</var> to <code>uint16</code>.
+     */
+    public static int toUint16(short value)
+    {
+        return value < 0 ? MAX_UINT_16_P1 + value : value;
+    }
+
+    /**
+     * Converts <var>value</var> to <code>uint32</code>.
+     */
+    public static long toUint32(int value)
+    {
+        return value < 0 ? MAX_UINT_32_P1 + value : value;
+    }
+
+    /**
+     * Converts <var>value</var> to <code>uint64</code>.
+     */
+    public static BigInteger toUint64(long value)
+    {
+        return new BigInteger(1, NativeData.longToByte(new long[]
+            { value }, ByteOrder.BIG_ENDIAN));
+    }
+
+}
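
A sketch of the signed/unsigned round trip these helpers implement: values that overflow the signed Java type are stored with the sign bit set and recovered unchanged on the way out.

    // uint8: 200 does not fit a signed byte, but the bit pattern does.
    byte raw = UnsignedIntUtils.toInt8(200);          // (byte) -56
    short asUnsigned = UnsignedIntUtils.toUint8(raw); // 200 again

    // uint64 needs BigInteger for the unsigned view.
    java.math.BigInteger big = UnsignedIntUtils.toUint64(-1L); // 2^64 - 1
    long packedAgain = UnsignedIntUtils.toInt64(big);          // -1L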
diff --git a/source/java/ch/systemsx/cisd/hdf5/cleanup/CleanUpCallable.java b/source/java/ch/systemsx/cisd/hdf5/cleanup/CleanUpCallable.java
new file mode 100644
index 0000000..3f72982
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/cleanup/CleanUpCallable.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.cleanup;
+
+/**
+ * A class that implements the logic of cleaning up a resource even in case of an exception, but
+ * re-throws an exception from the clean-up procedure only if the main procedure didn't throw one
+ * itself. <code>CleanUpCallable</code>s can be stacked.
+ * <p>
+ * <em>This is an internal implementation class that is not meant to be used by users of the library.</em>
+ * 
+ * @author Bernd Rinn
+ */
+public final class CleanUpCallable
+{
+    /**
+     * Runs a {@link ICallableWithCleanUp} and ensures that all registered clean-ups are performed
+     * afterwards.
+     */
+    public <T> T call(ICallableWithCleanUp<T> runnable)
+    {
+        final CleanUpRegistry registry = new CleanUpRegistry();
+        boolean exceptionThrown = true;
+        try
+        {
+            T result = runnable.call(registry);
+            exceptionThrown = false;
+            return result;
+        } finally
+        {
+            registry.cleanUp(exceptionThrown);
+        }
+    }
+}
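
A sketch of the intended call pattern, with hypothetical openResource()/closeResource() placeholders standing in for real handle management:

    final CleanUpCallable runner = new CleanUpCallable();
    final int result = runner.call(new ICallableWithCleanUp<Integer>()
        {
            @Override
            public Integer call(ICleanUpRegistry registry)
            {
                final int handle = openResource(); // hypothetical helper
                registry.registerCleanUp(new Runnable()
                    {
                        @Override
                        public void run()
                        {
                            closeResource(handle); // hypothetical helper
                        }
                    });
                // The clean-up runs after this returns, whether the method
                // finishes normally or with an exception.
                return handle * 2;
            }
        });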
diff --git a/source/java/ch/systemsx/cisd/hdf5/cleanup/CleanUpRegistry.java b/source/java/ch/systemsx/cisd/hdf5/cleanup/CleanUpRegistry.java
new file mode 100644
index 0000000..a1182d0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/cleanup/CleanUpRegistry.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.cleanup;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A class that allows registering clean-up items and performing the registered clean-ups later.
+ * <p>
+ * <em>This is an internal implementation class that is not meant to be used by users of the library.</em>
+ * 
+ * @author Bernd Rinn
+ */
+public class CleanUpRegistry implements ICleanUpRegistry
+{
+    private final List<Runnable> cleanUpList = new ArrayList<Runnable>();
+
+    /**
+     * Creates a synchronized version of a {@link CleanUpRegistry}. 
+     */
+    public static CleanUpRegistry createSynchonized()
+    {
+        return new CleanUpRegistry()
+            {
+                @Override
+                public synchronized void registerCleanUp(Runnable cleanUp)
+                {
+                    super.registerCleanUp(cleanUp);
+                }
+
+                @Override
+                public synchronized void cleanUp(boolean suppressExceptions)
+                {
+                    super.cleanUp(suppressExceptions);
+                }
+            };
+    }
+    
+    @Override
+    public void registerCleanUp(Runnable cleanUp)
+    {
+        cleanUpList.add(cleanUp);
+    }
+
+    /**
+     * Performs all clean-ups registered with {@link #registerCleanUp(Runnable)}.
+     * 
+     * @param suppressExceptions If <code>true</code>, all exceptions that happen during clean-up
+     *            will be suppressed.
+     */
+    public void cleanUp(boolean suppressExceptions)
+    {
+        RuntimeException exceptionDuringCleanUp = null;
+        for (int i = cleanUpList.size() - 1; i >= 0; --i)
+        {
+            final Runnable runnable = cleanUpList.get(i);
+            try
+            {
+                runnable.run();
+            } catch (RuntimeException ex)
+            {
+                if (suppressExceptions == false && exceptionDuringCleanUp == null)
+                {
+                    exceptionDuringCleanUp = ex;
+                }
+            }
+        }
+        cleanUpList.clear();
+        if (exceptionDuringCleanUp != null)
+        {
+            throw exceptionDuringCleanUp;
+        }
+    }
+
+}
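
The backwards loop in cleanUp() gives LIFO semantics, so resources registered later (typically the more deeply nested ones) are released first. A sketch:

    final CleanUpRegistry registry = new CleanUpRegistry();
    registry.registerCleanUp(new Runnable()
        {
            @Override
            public void run()
            {
                System.out.println("registered first, runs last");
            }
        });
    registry.registerCleanUp(new Runnable()
        {
            @Override
            public void run()
            {
                System.out.println("registered last, runs first");
            }
        });
    // Pass false to re-throw the first exception seen during clean-up.
    registry.cleanUp(false);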
diff --git a/source/java/ch/systemsx/cisd/hdf5/cleanup/ICallableWithCleanUp.java b/source/java/ch/systemsx/cisd/hdf5/cleanup/ICallableWithCleanUp.java
new file mode 100644
index 0000000..5781142
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/cleanup/ICallableWithCleanUp.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.cleanup;
+
+/**
+ * A role that calls a method requiring one or more clean-up steps which must run reliably at the
+ * end of the method, regardless of whether the method finishes normally or exits with an
+ * exception.
+ * <p>
+ * <em>This is an internal interface that is not meant to be used by users of the library.</em>
+ * 
+ * @author Bernd Rinn
+ */
+public interface ICallableWithCleanUp<T>
+{
+
+    /** Calls the method requiring clean-up. */
+    public T call(ICleanUpRegistry registry);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/cleanup/ICleanUpRegistry.java b/source/java/ch/systemsx/cisd/hdf5/cleanup/ICleanUpRegistry.java
new file mode 100644
index 0000000..9281c20
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/cleanup/ICleanUpRegistry.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.cleanup;
+
+/**
+ * A role that allows registering a clean-up method which is called regardless of whether an
+ * exception occurs.
+ * <p>
+ * <em>This is an internal interface that is not meant to be used by users of the library.</em>
+ * 
+ * @author Bernd Rinn
+ */
+public interface ICleanUpRegistry
+{
+
+    /**
+     * Register a clean-up to run when the main {@link Runnable} has been executed.
+     */
+    public void registerCleanUp(Runnable cleanUp);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntry.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntry.java
new file mode 100644
index 0000000..e3a8f26
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntry.java
@@ -0,0 +1,733 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.hdf5.h5ar.HDF5ArchiveUpdater.DataSetInfo;
+
+/**
+ * An entry of an archive listing.
+ * 
+ * @author Bernd Rinn
+ */
+public final class ArchiveEntry
+{
+    private final String path;
+
+    private final String parentPath;
+
+    private final String name;
+
+    private final String realPath;
+
+    private final String realParentPath;
+
+    private final String realName;
+
+    private final ArchiveEntryCompleteness completeness;
+
+    private final boolean hasLinkTarget;
+
+    private final String linkTarget;
+
+    private final FileLinkType linkType;
+
+    private final FileLinkType verifiedLinkType;
+
+    private final long size;
+
+    private long verifiedSize;
+
+    private final long lastModified;
+
+    private final long verifiedLastModified;
+
+    private int crc32;
+
+    private boolean knowsChecksum;
+
+    private int verifiedCrc32;
+
+    private final int uid;
+
+    private final int gid;
+
+    private final IdCache idCache;
+
+    private final short permissions;
+
+    private final String errorLineOrNull;
+
+    ArchiveEntry(String dir, String path, LinkRecord link, IdCache idCache)
+    {
+        this(dir, path, link, idCache, null);
+    }
+
+    ArchiveEntry(String dir, String path, LinkRecord link, IdCache idCache, String errorLineOrNull)
+    {
+        this.parentPath = (dir != null) ? dir : Utils.getParentPath(path);
+        this.realParentPath = parentPath;
+        this.path = path;
+        this.realPath = path;
+        this.name = link.getLinkName();
+        this.realName = name;
+        this.idCache = idCache;
+        this.completeness = link.getCompleteness();
+        this.hasLinkTarget = (link.tryGetLinkTarget() != null);
+        this.linkTarget = hasLinkTarget ? link.tryGetLinkTarget() : "?";
+        this.linkType = link.getLinkType();
+        this.verifiedLinkType = link.getVerifiedType();
+        this.size = link.getSize();
+        this.verifiedSize = link.getVerifiedSize();
+        this.lastModified = link.getLastModified();
+        this.verifiedLastModified = link.getVerifiedLastModified();
+        this.crc32 = link.getCrc32();
+        this.knowsChecksum = link.hasCRC32Checksum();
+        this.verifiedCrc32 = link.getVerifiedCrc32();
+        this.uid = link.getUid();
+        this.gid = link.getGid();
+        this.permissions = link.getPermissions();
+        this.errorLineOrNull = errorLineOrNull;
+    }
+
+    ArchiveEntry(ArchiveEntry pathInfo, ArchiveEntry linkInfo)
+    {
+        this.parentPath = pathInfo.parentPath;
+        this.path = pathInfo.path;
+        this.name = pathInfo.name;
+        this.realParentPath = linkInfo.parentPath;
+        this.realPath = linkInfo.realPath;
+        this.realName = linkInfo.name;
+        this.idCache = pathInfo.idCache;
+        this.completeness = linkInfo.completeness;
+        this.hasLinkTarget = linkInfo.hasLinkTarget;
+        this.linkTarget = linkInfo.linkTarget;
+        this.linkType = linkInfo.linkType;
+        this.verifiedLinkType = linkInfo.verifiedLinkType;
+        this.size = linkInfo.size;
+        this.verifiedSize = linkInfo.verifiedSize;
+        this.lastModified = Math.max(pathInfo.lastModified, linkInfo.lastModified);
+        this.verifiedLastModified =
+                Math.max(pathInfo.verifiedLastModified, linkInfo.verifiedLastModified);
+        this.crc32 = linkInfo.crc32;
+        this.knowsChecksum = linkInfo.knowsChecksum;
+        this.verifiedCrc32 = linkInfo.verifiedCrc32;
+        this.uid = linkInfo.uid;
+        this.gid = linkInfo.gid;
+        this.permissions = linkInfo.permissions;
+        this.errorLineOrNull = null;
+    }
+
+    ArchiveEntry(String errorLineOrNull)
+    {
+        this.errorLineOrNull = errorLineOrNull;
+        this.path = null;
+        this.parentPath = null;
+        this.name = null;
+        this.realPath = null;
+        this.realParentPath = null;
+        this.realName = null;
+        this.idCache = null;
+        this.completeness = null;
+        this.linkTarget = null;
+        this.hasLinkTarget = false;
+        this.linkType = null;
+        this.verifiedLinkType = null;
+        this.size = Utils.UNKNOWN;
+        this.verifiedSize = Utils.UNKNOWN;
+        this.lastModified = Utils.UNKNOWN;
+        this.verifiedLastModified = Utils.UNKNOWN;
+        this.crc32 = 0;
+        this.verifiedCrc32 = 0;
+        this.uid = Utils.UNKNOWN;
+        this.gid = Utils.UNKNOWN;
+        this.permissions = Utils.UNKNOWN_S;
+    }
+
+    void setDataSetInfo(DataSetInfo dataSetInfo)
+    {
+        this.verifiedSize = dataSetInfo.size;
+        this.crc32 = dataSetInfo.crc32;
+        this.verifiedCrc32 = crc32;
+        this.knowsChecksum = true;
+    }
+
+    /**
+     * Returns the full path of this entry.
+     */
+    public String getPath()
+    {
+        return path;
+    }
+
+    /**
+     * Returns the parent directory of the path of this entry.
+     */
+    public String getParentPath()
+    {
+        return parentPath;
+    }
+
+    /**
+     * Returns the name of the path of this entry.
+     */
+    public String getName()
+    {
+        return name;
+    }
+
+    /**
+     * Returns the real full path of this entry.
+     * <p>
+     * This will be the same as {@link #getPath()}, except when it originates from a call to
+     * {@link IHDF5ArchiveInfoProvider#tryGetResolvedEntry(String, boolean)} with
+     * <code>keepPath=true</code> where it will be the path of the link target.
+     */
+    public String getRealPath()
+    {
+        return realPath;
+    }
+
+    /**
+     * Returns the real parent directory of the path of this entry.
+     * <p>
+     * This will be the same as {@link #getParentPath()}, except when it originates from a call to
+     * {@link IHDF5ArchiveInfoProvider#tryGetResolvedEntry(String, boolean)} with
+     * <code>keepPath=true</code> where it will be the parent path of the link target.
+     */
+    public String getRealParentPath()
+    {
+        return realParentPath;
+    }
+
+    /**
+     * Returns the real name of the path of this entry.
+     * <p>
+     * This will be the same as {@link #getName()}, except when it originates from a call to
+     * {@link IHDF5ArchiveInfoProvider#tryGetResolvedEntry(String, boolean)} with
+     * <code>keepPath=true</code> where it will be the name of the link target.
+     */
+    public String getRealName()
+    {
+        return realName;
+    }
+
+    /**
+     * Returns how complete this entry is.
+     * <p>
+     * {@link ArchiveEntryCompleteness#BASE} entries can occur if the archive does not contain valid
+     * file attributes, {@link ArchiveEntryCompleteness#LAST_MODIFIED} entries can occur if the
+     * archive has been created or updated on a non-POSIX (read: Microsoft Windows) machine.
+     */
+    public ArchiveEntryCompleteness getCompleteness()
+    {
+        return completeness;
+    }
+
+    /**
+     * Returns the link target. May be "?" if the link target has not been read or if this entry
+     * does not represent a link.
+     * 
+     * @see #hasLinkTarget()
+     */
+    public String getLinkTarget()
+    {
+        return linkTarget;
+    }
+
+    /**
+     * Returns <code>true</code>, if this entry has a meaningful link target.
+     * 
+     * @see #getLinkTarget()
+     */
+    public boolean hasLinkTarget()
+    {
+        return hasLinkTarget;
+    }
+
+    /**
+     * Returns the type of this entry.
+     */
+    public FileLinkType getLinkType()
+    {
+        return linkType;
+    }
+
+    /**
+     * Returns if this entry is of type {@link FileLinkType#DIRECTORY}.
+     */
+    public boolean isDirectory()
+    {
+        return linkType == FileLinkType.DIRECTORY;
+    }
+
+    /**
+     * Returns if this entry is of type {@link FileLinkType#SYMLINK}.
+     */
+    public boolean isSymLink()
+    {
+        return linkType == FileLinkType.SYMLINK;
+    }
+
+    /**
+     * Returns if this entry is of type {@link FileLinkType#REGULAR_FILE}.
+     */
+    public boolean isRegularFile()
+    {
+        return linkType == FileLinkType.REGULAR_FILE;
+    }
+
+    /**
+     * Returns the size of this entry, if this entry is a regular file, or 0 otherwise.
+     * 
+     * @see #isRegularFile()
+     */
+    public long getSize()
+    {
+        return size;
+    }
+
+    /**
+     * Returns the date and time of last modification of this entry, measured in seconds since the
+     * epoch (00:00:00 GMT, January 1, 1970), or -1, if this information is not available.
+     */
+    public long getLastModified()
+    {
+        return lastModified;
+    }
+
+    /**
+     * Returns a string representation of the date and time of last modification of this entry, or
+     * "?", if this information is not available.
+     */
+    public String getLastModifiedStr()
+    {
+        return getLastModifiedStr(lastModified);
+    }
+
+    private static String getLastModifiedStr(long lastModified)
+    {
+        if (lastModified >= 0)
+        {
+            return String.format("%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS", lastModified
+                    * Utils.MILLIS_PER_SECOND);
+        } else
+        {
+            return "?";
+        }
+
+    }
+
+    /**
+     * Returns <code>true</code>, if this archive entry has a CRC32 checksum stored.
+     */
+    public boolean hasChecksum()
+    {
+        return knowsChecksum;
+    }
+
+    /**
+     * Returns the CRC32 checksum of this entry, or 0, if this information is not available or if
+     * this entry is not a regular file.
+     * 
+     * @see #isRegularFile()
+     */
+    public int getCrc32()
+    {
+        return crc32;
+    }
+
+    /**
+     * Returns a string representation (using hexadecimal digits) of the CRC32 checksum of this
+     * entry, or "00000000", if this information is not available or if this entry is not a regular
+     * file.
+     * 
+     * @see #isRegularFile()
+     */
+    public String getCrc32Str()
+    {
+        return Utils.crc32ToString(crc32);
+    }
+
+    /**
+     * Returns a string representation of the user owning this archive entry, or "?", if this
+     * information is not available.
+     * <p>
+     * Note that the archive only stores the UID and it is the local system which is used to resolve
+     * the UID to a user.
+     */
+    public String getUser(boolean numeric)
+    {
+        return (uid >= 0) ? idCache.getUser(uid, numeric) : "?";
+    }
+
+    /**
+     * Returns the UID of the user owning this archive entry, or -1, if this information is not
+     * available.
+     */
+    public int getUid()
+    {
+        return uid;
+    }
+
+    /**
+     * Returns a string representation of the group owning this archive entry, or "?", if this
+     * information is not available.
+     * <p>
+     * Note that the archive only stores the GID and it is the local system which is used to resolve
+     * the GID to a group.
+     */
+    public String getGroup(boolean numeric)
+    {
+        return (gid >= 0) ? idCache.getGroup(gid, numeric) : "?";
+    }
+
+    /**
+     * Returns the GID of the group owning this archive entry, or -1, if this information is not
+     * available.
+     */
+    public int getGid()
+    {
+        return gid;
+    }
+
+    /**
+     * Returns the access permissions of this archive entry, or -1, if this information is not
+     * available.
+     */
+    public short getPermissions()
+    {
+        return permissions;
+    }
+
+    /**
+     * Returns a string representation of the access permissions of this archive entry, or "?", if
+     * this information is not available.
+     */
+    public String getPermissionsString(boolean numeric)
+    {
+        return (permissions >= 0) ? Utils.permissionsToString(permissions,
+                linkType == FileLinkType.DIRECTORY, numeric) : "?";
+    }
+
+    /**
+     * Returns the error line saved for this archive entry, or <code>null</code>, if no error line
+     * is available. A non-null error line is one indication of a verification error.
+     * <p>
+     * Note that the error line may contain additional information when a verification step has
+     * failed on the archive entry.
+     */
+    public String tryGetErrorLine()
+    {
+        return errorLineOrNull;
+    }
+
+    /**
+     * Returns the verified type of this entry, or <code>null</code>, if no verification has been
+     * performed on this entry.
+     * <p>
+     * This information may come from an internal test of the archive (see
+     * {@link ListParameters#isTestArchive()}) or from a verification of the archive against the
+     * filesystem (see {@link VerifyParameters}).
+     */
+    public FileLinkType tryGetVerifiedLinkType()
+    {
+        return verifiedLinkType;
+    }
+
+    /**
+     * Returns the verified size of this archive entry, or -1, if this information is not available.
+     */
+    public long getVerifiedSize()
+    {
+        return verifiedSize;
+    }
+
+    /**
+     * Returns the verified CRC32 checksum of this archive entry, or 0, if this information is not
+     * available.
+     */
+    public int getVerifiedCrc32()
+    {
+        return verifiedCrc32;
+    }
+
+    /**
+     * Returns a string representation (using hexadecimal digits) of the verified CRC32 checksum of
+     * this entry, or "00000000", if this information is not available or if this entry is not a
+     * regular file.
+     * 
+     * @see #isRegularFile()
+     */
+    public String getVerifiedCrc32Str()
+    {
+        return Utils.crc32ToString(verifiedCrc32);
+    }
+
+    /**
+     * Returns the verified date and time of last modification of this entry, measured in seconds
+     * since the epoch (00:00:00 GMT, January 1, 1970), or -1, if this information is not available.
+     */
+    public long getVerifiedLastModified()
+    {
+        return verifiedLastModified;
+    }
+
+    /**
+     * Returns a string representation of the verified date and time of last modification of this
+     * entry, or "?", if this information is not available.
+     */
+    public String getVerifiedLastModifiedStr()
+    {
+        return getLastModifiedStr(verifiedLastModified);
+    }
+
+    /**
+     * Returns true, if this entry has verification information on archive integrity.
+     */
+    public boolean hasVerificationInfo()
+    {
+        return (verifiedLinkType != null || verifiedSize != -1 || verifiedCrc32 != 0
+                || verifiedLastModified != -1 || errorLineOrNull != null);
+    }
+
+    /**
+     * Returns <code>true</code> if this archive entry has been verified successfully (or if no
+     * verification information is available).
+     */
+    public boolean isOK()
+    {
+        return (errorLineOrNull == null) && linkTypeOK() && sizeOK() && lastModifiedOK()
+                && checksumOK();
+    }
+
+    /**
+     * Returns <code>true</code> if the type of this archive entry has been verified
+     * successfully (or if no verification information for the type is available).
+     */
+    public boolean linkTypeOK()
+    {
+        return (verifiedLinkType == null) || (linkType == verifiedLinkType);
+    }
+
+    /**
+     * Returns <code>true</code> if the size of this archive entry has been verified
+     * successfully (or if no verification information for the size is available).
+     */
+    public boolean sizeOK()
+    {
+        return (verifiedSize == Utils.UNKNOWN) || (size == verifiedSize);
+    }
+
+    /**
+     * Returns <code>true</code> if the last modification date of this archive entry has been
+     * verified successfully (or if no verification information for the last modification date is
+     * available).
+     */
+    public boolean lastModifiedOK()
+    {
+        return (verifiedLastModified == Utils.UNKNOWN) || (lastModified == Utils.UNKNOWN)
+                || (lastModified == verifiedLastModified);
+    }
+
+    /**
+     * Returns <code>true</code> if the checksum of this archive entry has been verified
+     * successfully (or if no verification information for the checksum is available).
+     */
+    public boolean checksumOK()
+    {
+        return (false == knowsChecksum) || (verifiedSize == Utils.UNKNOWN)
+                || (crc32 == verifiedCrc32);
+    }
+
+    /**
+     * Returns a status string for this entry.
+     * <p>
+     * Note that the status will always be <code>OK</code> if no verification information is
+     * available.
+     * 
+     * @see #hasVerificationInfo()
+     */
+    public String getStatus(boolean verbose)
+    {
+        if (isOK() == false)
+        {
+            if (errorLineOrNull != null)
+            {
+                return "ERROR: " + errorLineOrNull;
+            } else if (linkTypeOK() == false)
+            {
+                return verbose ? String.format(
+                        "ERROR: Entry '%s' failed link type test, expected: %s, found: %s.", path,
+                        linkType, verifiedLinkType) : "WRONG TYPE";
+            } else if (sizeOK() == false)
+            {
+                return verbose ? String.format(
+                        "ERROR: Entry '%s' failed size test, expected: %d, found: %d.", path, size,
+                        verifiedSize) : "WRONG SIZE";
+            } else if (checksumOK() == false)
+            {
+                return verbose ? String.format(
+                        "ERROR: Entry '%s' failed CRC checksum test, expected: %s, found: %s.",
+                        path, Utils.crc32ToString(crc32), Utils.crc32ToString(verifiedCrc32))
+                        : "WRONG CRC32";
+            } else if (lastModifiedOK() == false)
+            {
+                return verbose ? String
+                        .format("ERROR: Entry '%s' failed last modification test, expected: %s, found: %s.",
+                                path, getLastModifiedStr(), getVerifiedLastModifiedStr())
+                        : "WRONG LASTMODIFICATION";
+            }
+        }
+        return "OK";
+    }
+
+    /**
+     * Returns a (verbose) string description of this entry, including the (brief) verification
+     * status, if available.
+     */
+    public String describeLink()
+    {
+        return describeLink(true, false, true);
+    }
+
+    /**
+     * Returns a string description of this entry, including the (brief) verification status, if
+     * available.
+     * 
+     * @param verbose If <code>true</code>, the link description will contain all information
+     *            available, if <code>false</code>, it will only contain the path information.
+     */
+    public String describeLink(boolean verbose)
+    {
+        return describeLink(verbose, false, true);
+    }
+
+    /**
+     * Returns a string description of this entry, including the (brief) verification status, if
+     * available.
+     * 
+     * @param verbose If <code>true</code>, the link description will contain all information
+     *            available, if <code>false</code>, it will only contain the path information.
+     * @param numeric If <code>true</code>, file ownership and access permissions will be
+     *            represented numerically, if <code>false</code>, they will be represented as
+     *            strings. Only relevant if <var>verbose</var> is <code>true</code>.
+     */
+    public String describeLink(boolean verbose, boolean numeric)
+    {
+        return describeLink(verbose, numeric, true);
+    }
+
+    /**
+     * Returns a string description of this entry.
+     * 
+     * @param verbose If <code>true</code>, the link description will contain all information
+     *            available, if <code>false</code>, it will only contain the path information.
+     * @param numeric If <code>true</code>, file ownership and access permissions will be
+     *            represented numerically, if <code>false</code>, they will be represented as
+     *            strings. Only relevant if <var>verbose</var> is <code>true</code>.
+     * @param includeCheck If <code>true</code> (and if verification information is available for
+     *            this entry), add a (brief) verification status string.
+     * @see #hasVerificationInfo()
+     */
+    public String describeLink(boolean verbose, boolean numeric, boolean includeCheck)
+    {
+        final StringBuilder builder = new StringBuilder();
+        if (verbose == false)
+        {
+            builder.append(path);
+        } else
+        {
+            switch (completeness)
+            {
+                case BASE:
+                    if (linkType == FileLinkType.SYMLINK)
+                    {
+                        builder.append(String.format("          \t%s -> %s", path, linkTarget));
+                    } else if (linkType == FileLinkType.DIRECTORY)
+                    {
+                        builder.append(String.format("       DIR\t%s", path));
+                    } else
+                    {
+                        builder.append(String.format("%10d\t%s\t%s%s", size,
+                                Utils.crc32ToString(crc32), path,
+                                (linkType == FileLinkType.REGULAR_FILE) ? "" : "\t*"));
+                    }
+                    break;
+                case LAST_MODIFIED:
+                    if (linkType == FileLinkType.SYMLINK)
+                    {
+                        builder.append(String.format(
+                                "          \t%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS\t%2$s -> %3$s",
+                                lastModified * Utils.MILLIS_PER_SECOND, path, linkTarget));
+                    } else if (linkType == FileLinkType.DIRECTORY)
+                    {
+                        builder.append(String.format(
+                                "       DIR\t%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS\t%2$s",
+                                lastModified * Utils.MILLIS_PER_SECOND, path));
+                    } else
+                    {
+                        builder.append(String.format(
+                                "%10d\t%2$tY-%2$tm-%2$td %2$tH:%2$tM:%2$tS\t%3$s\t%4$s%5$s", size,
+                                lastModified * Utils.MILLIS_PER_SECOND, Utils.crc32ToString(crc32),
+                                path, (linkType == FileLinkType.REGULAR_FILE) ? "" : "\t*"));
+                    }
+                    break;
+                case FULL:
+                    if (linkType == FileLinkType.SYMLINK)
+                    {
+                        builder.append(String
+                                .format("%s\t%s\t%s\t          \t%4$tY-%4$tm-%4$td %4$tH:%4$tM:%4$tS\t        \t%5$s -> %6$s",
+                                        Utils.permissionsToString(permissions, false, numeric),
+                                        getUser(numeric), getGroup(numeric), lastModified
+                                                * Utils.MILLIS_PER_SECOND, path, linkTarget));
+                    } else if (linkType == FileLinkType.DIRECTORY)
+                    {
+                        builder.append(String
+                                .format("%s\t%s\t%s\t       DIR\t%4$tY-%4$tm-%4$td %4$tH:%4$tM:%4$tS\t        \t%5$s",
+                                        Utils.permissionsToString(permissions, true, numeric),
+                                        getUser(numeric), getGroup(numeric), lastModified
+                                                * Utils.MILLIS_PER_SECOND, path));
+                    } else
+                    {
+                        builder.append(String
+                                .format("%s\t%s\t%s\t%10d\t%5$tY-%5$tm-%5$td %5$tH:%5$tM:%5$tS\t%6$s\t%7$s%8$s",
+                                        Utils.permissionsToString(permissions, false, numeric),
+                                        getUser(numeric), getGroup(numeric), size, lastModified
+                                                * Utils.MILLIS_PER_SECOND,
+                                        Utils.crc32ToString(crc32), path,
+                                        (linkType == FileLinkType.REGULAR_FILE) ? "" : "\t*"));
+                    }
+                    break;
+                default:
+                    throw new Error("Unknown level of link completeness: " + completeness);
+            }
+        }
+        if (includeCheck && hasVerificationInfo())
+        {
+            builder.append('\t');
+            builder.append(getStatus(false));
+        }
+        return builder.toString();
+    }
+
+    @Override
+    public String toString()
+    {
+        return describeLink();
+    }
+
+}
\ No newline at end of file
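
ArchiveEntry objects are constructed by the library itself (the constructors are package-private) and handed to callers during listing or extraction. A sketch of a consumer, using the IArchiveEntryVisitor interface that is used elsewhere in this commit:

    IArchiveEntryVisitor visitor = new IArchiveEntryVisitor()
        {
            @Override
            public void visit(ArchiveEntry entry)
            {
                if (entry.isRegularFile() && entry.hasChecksum())
                {
                    System.out.println(entry.getPath() + "\t" + entry.getCrc32Str());
                }
                if (entry.hasVerificationInfo() && entry.isOK() == false)
                {
                    System.err.println(entry.getStatus(true));
                }
            }
        };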
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryCompleteness.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryCompleteness.java
new file mode 100644
index 0000000..ecfd03e
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryCompleteness.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+/**
+ * An enum to specify the completeness of an archive entry.
+ *
+ * @author Bernd Rinn
+ */
+public enum ArchiveEntryCompleteness
+{
+    /** Contains only the path information. */
+    BASE, 
+    /** Contains the path information and the "last modified" timestamp. */
+    LAST_MODIFIED, 
+    /** Contains all information, including user and permission. */
+    FULL
+}
\ No newline at end of file
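
A sketch of branching on the completeness level when rendering an entry, given an ArchiveEntry entry (see the previous file):

    switch (entry.getCompleteness())
    {
        case BASE:
            // Path information only.
            break;
        case LAST_MODIFIED:
            // Path plus "last modified" timestamp (typical for archives
            // written on non-POSIX systems).
            break;
        case FULL:
            // Path, timestamp, uid/gid and permissions.
            break;
    }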
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryExtractProcessor.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryExtractProcessor.java
new file mode 100644
index 0000000..5d09c6a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryExtractProcessor.java
@@ -0,0 +1,284 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.zip.CRC32;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.unix.Unix.Stat;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * A processor that extracts files from an archive to the file system.
+ * 
+ * @author Bernd Rinn
+ */
+class ArchiveEntryExtractProcessor implements IArchiveEntryProcessor
+{
+    private static final int ROOT_UID = 0;
+
+    private final IArchiveEntryVisitor visitorOrNull;
+
+    private final ArchivingStrategy strategy;
+
+    private final File rootDirectory;
+
+    private final String rootPathToStrip;
+
+    private final byte[] buffer;
+
+    private final GroupCache groupCache;
+
+    ArchiveEntryExtractProcessor(IArchiveEntryVisitor visitorOrNull, ArchivingStrategy strategy,
+            File rootDirectory, String rootPathToStrip, byte[] buffer)
+    {
+        this.visitorOrNull = visitorOrNull;
+        this.strategy = strategy;
+        this.rootDirectory = rootDirectory;
+        final String normalizedRootPathToStrip = Utils.normalizePath(rootPathToStrip);
+        this.rootPathToStrip =
+                "/".equals(normalizedRootPathToStrip) ? "" : normalizedRootPathToStrip;
+        this.buffer = buffer;
+        this.groupCache = new GroupCache();
+    }
+
+    @Override
+    public boolean process(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException
+    {
+        if (strategy.doExclude(path, link.isDirectory()))
+        {
+            return false;
+        }
+        final File file = createFile(path);
+        if (link.isDirectory())
+        {
+            if (file.exists() && file.isDirectory() == false)
+            {
+                file.delete();
+            }
+            file.mkdirs();
+            if (file.isDirectory() == false)
+            {
+                errorStrategy.dealWithError(new UnarchivingException(file, new IOException(
+                        "Failed to make directory '" + file.getAbsolutePath() + "'.")));
+            }
+            if (visitorOrNull != null)
+            {
+                visitorOrNull.visit(new ArchiveEntry(dir, path, link, idCache));
+            }
+        } else if (link.tryGetLinkTarget() != null && Unix.isOperational())
+        {
+            try
+            {
+                file.delete();
+                final String linkTarget = link.tryGetLinkTarget();
+                Unix.createSymbolicLink(linkTarget, file.getAbsolutePath());
+                if (visitorOrNull != null)
+                {
+                    visitorOrNull.visit(new ArchiveEntry(dir, path, link, idCache));
+                }
+            } catch (IOExceptionUnchecked ex)
+            {
+                errorStrategy.dealWithError(new UnarchivingException(file, ex));
+            }
+        } else
+        {
+            if (link.isSymLink())
+            {
+                if (Unix.isOperational() == false)
+                {
+                    errorStrategy.warning("Warning: extracting symlink as regular file because"
+                            + " Unix calls are not available on this system.");
+                } else
+                {
+                    errorStrategy.dealWithError(new UnarchivingException(path,
+                            new HDF5JavaException("Symlink doesn't have a link target.")));
+                }
+            } else
+            {
+                try
+                {
+                    // Here we don't rely on link.getSize() to protect against wrong index entries.
+                    final long size = reader.object().getSize(path);
+                    final int crc32 = copyFromHDF5(reader, path, size, file);
+                    restoreAttributes(file, link);
+                    final FileSizeType sizeType = getFileSizeType(file);
+                    link.setVerifiedType(sizeType.type);
+                    link.setFileVerification(sizeType.size, crc32, file.lastModified()
+                            / Utils.MILLIS_PER_SECOND);
+                    final ArchiveEntry entry = new ArchiveEntry(dir, path, link, idCache);
+                    if (visitorOrNull != null)
+                    {
+                        visitorOrNull.visit(entry);
+                    }
+                    if (entry.isOK() == false)
+                    {
+                        errorStrategy.dealWithError(new UnarchivingException(path, entry
+                                .getStatus(true)));
+                    }
+                } catch (IOException ex)
+                {
+                    errorStrategy.dealWithError(new UnarchivingException(file, ex));
+                } catch (HDF5Exception ex)
+                {
+                    errorStrategy.dealWithError(new UnarchivingException(path, ex));
+                }
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void postProcessDirectory(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException, HDF5Exception
+    {
+        final File file = createFile(path);
+        restoreAttributes(file, link);
+    }
+
+    private File createFile(String path)
+    {
+        final String workingPath =
+                path.startsWith(rootPathToStrip) ? path.substring(rootPathToStrip.length()) : path;
+        final File file = new File(rootDirectory, workingPath);
+        return file;
+    }
+
+    /**
+     * A record for file size and type.
+     * 
+     * @author Bernd Rinn
+     */
+    private static class FileSizeType
+    {
+        final FileLinkType type;
+
+        final long size;
+
+        FileSizeType(FileLinkType type, long size)
+        {
+            super();
+            this.type = type;
+            this.size = size;
+        }
+    }
+
+    private FileSizeType getFileSizeType(final File file)
+    {
+        if (Unix.isOperational())
+        {
+            final Stat info = Unix.getLinkInfo(file.getPath(), false);
+            return new FileSizeType(info.getLinkType(), info.getSize());
+        } else
+        {
+            return new FileSizeType((file.isDirectory()) ? FileLinkType.DIRECTORY
+                    : (file.isFile() ? FileLinkType.REGULAR_FILE : FileLinkType.OTHER),
+                    file.length());
+        }
+
+    }
+
+    private int copyFromHDF5(final IHDF5Reader reader, final String objectPath, final long size,
+            File destination) throws IOException
+    {
+        final OutputStream output = FileUtils.openOutputStream(destination);
+        final CRC32 crc32 = new CRC32();
+        try
+        {
+            long offset = 0;
+            while (offset < size)
+            {
+                final int n =
+                        reader.opaque().readArrayToBlockWithOffset(objectPath, buffer, buffer.length,
+                                offset, 0);
+                offset += n;
+                output.write(buffer, 0, n);
+                crc32.update(buffer, 0, n);
+            }
+            output.close(); // Make sure we don't silence exceptions on closing.
+        } finally
+        {
+            IOUtils.closeQuietly(output);
+        }
+        return (int) crc32.getValue();
+    }
+
+    private void restoreAttributes(File file, LinkRecord linkInfoOrNull)
+    {
+        assert file != null;
+
+        if (linkInfoOrNull != null)
+        {
+            if (linkInfoOrNull.hasLastModified())
+            {
+                file.setLastModified(linkInfoOrNull.getLastModified() * Utils.MILLIS_PER_SECOND);
+            }
+            if (linkInfoOrNull.hasUnixPermissions() && Unix.isOperational())
+            {
+                Unix.setAccessMode(file.getPath(), linkInfoOrNull.getPermissions());
+                if (Unix.getUid() == ROOT_UID) // Are we root?
+                {
+                    Unix.setOwner(file.getPath(), linkInfoOrNull.getUid(), linkInfoOrNull.getGid());
+                } else
+                {
+                    if (groupCache.isUserInGroup(linkInfoOrNull.getGid()))
+                    {
+                        Unix.setOwner(file.getPath(), Unix.getUid(), linkInfoOrNull.getGid());
+                    }
+                }
+            }
+        }
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, String detailedMsg)
+    {
+        return new UnarchivingException(objectPath, detailedMsg);
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, HDF5Exception cause)
+    {
+        return new UnarchivingException(objectPath, cause);
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, RuntimeException cause)
+    {
+        return new UnarchivingException(objectPath, cause);
+    }
+
+    @Override
+    public ArchiverException createException(File file, IOException cause)
+    {
+        return new UnarchivingException(file, cause);
+    }
+
+}
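
The copy loop in copyFromHDF5() interleaves writing and checksumming so the data is read only once. The same pattern with plain java.io streams, as a standalone sketch using only the standard library:

    // Copies in to out and returns the CRC32 checksum of the copied bytes.
    static int copyWithCrc32(java.io.InputStream in, java.io.OutputStream out,
            byte[] buffer) throws java.io.IOException
    {
        final java.util.zip.CRC32 crc32 = new java.util.zip.CRC32();
        int n;
        while ((n = in.read(buffer)) != -1)
        {
            out.write(buffer, 0, n);
            crc32.update(buffer, 0, n);
        }
        return (int) crc32.getValue();
    }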
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryListProcessor.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryListProcessor.java
new file mode 100644
index 0000000..1ea8dbb
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryListProcessor.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.zip.CRC32;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.hdf5.HDF5LinkInformation;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * An {@link IArchiveEntryProcessor} that lists the entries of an archive.
+ * 
+ * @author Bernd Rinn
+ */
+class ArchiveEntryListProcessor implements IArchiveEntryProcessor
+{
+    private final IArchiveEntryVisitor visitor;
+
+    private final byte[] buffer;
+
+    private final boolean checkArchive;
+
+    ArchiveEntryListProcessor(IArchiveEntryVisitor visitor, byte[] buffer, boolean checkArchive)
+    {
+        this.visitor = visitor;
+        this.buffer = buffer;
+        this.checkArchive = checkArchive;
+    }
+
+    @Override
+    public boolean process(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException
+    {
+        String errorMessage = null;
+        if (checkArchive)
+        {
+            final HDF5LinkInformation info = reader.object().getLinkInformation(path);
+            final FileLinkType verifiedType = Utils.translateType(info.getType());
+            link.setVerifiedType(verifiedType);
+            if (verifiedType == FileLinkType.REGULAR_FILE)
+            {
+                final long verifiedSize = reader.object().getSize(path);
+                int verifiedCrc32 = 0;
+                try
+                {
+                    verifiedCrc32 = calcCRC32Archive(path, verifiedSize, reader);
+                } catch (HDF5Exception ex)
+                {
+                    errorMessage = ex.getClass().getSimpleName() + ": " + ex.getMessage();
+                }
+                link.setFileVerification(verifiedSize, verifiedCrc32, Utils.UNKNOWN);
+            }
+        }
+        visitor.visit(new ArchiveEntry(dir, path, link, idCache, errorMessage));
+        return true;
+    }
+
+    @Override
+    public void postProcessDirectory(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException, HDF5Exception
+    {
+    }
+
+    private int calcCRC32Archive(String objectPath, long size, IHDF5Reader hdf5Reader)
+    {
+        final CRC32 crc32Digest = new CRC32();
+        long offset = 0;
+        while (offset < size)
+        {
+            final int n =
+                    hdf5Reader.opaque().readArrayToBlockWithOffset(objectPath, buffer, buffer.length,
+                            offset, 0);
+            offset += n;
+            crc32Digest.update(buffer, 0, n);
+        }
+        return (int) crc32Digest.getValue();
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, String detailedMsg)
+    {
+        return new ListArchiveException(objectPath, detailedMsg);
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, HDF5Exception cause)
+    {
+        if (isTooManySymlinksError(cause))
+        {
+            return new ListArchiveTooManySymbolicLinksException(objectPath, cause);
+        }
+        return new ListArchiveException(objectPath, cause);
+    }
+
+    private boolean isTooManySymlinksError(HDF5Exception cause)
+    {
+        return cause instanceof HDF5LibraryException
+                && ((HDF5LibraryException) cause).getMajorErrorNumber() == HDF5Constants.H5E_LINK
+                && "Too many soft links in path".equals(((HDF5LibraryException) cause)
+                        .getMinorError());
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, RuntimeException cause)
+    {
+        return new ListArchiveException(objectPath, cause);
+    }
+
+    @Override
+    public ArchiverException createException(File file, IOException cause)
+    {
+        return new ListArchiveException(file, cause);
+    }
+
+}
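
Note: a usage sketch for the visitor consumed by this processor. Only the IArchiveEntryVisitor.visit(ArchiveEntry) callback and the processor constructor are taken from the code above; how the archive traversal is driven is not part of this diff, so the wiring here is hypothetical:

    // Hypothetical wiring; visit(ArchiveEntry) is the callback used above.
    final IArchiveEntryVisitor printer = new IArchiveEntryVisitor()
        {
            @Override
            public void visit(ArchiveEntry entry)
            {
                System.out.println(entry);
            }
        };
    // checkArchive == true makes the processor verify sizes and CRC32s, too.
    final IArchiveEntryProcessor lister =
            new ArchiveEntryListProcessor(printer, new byte[64 * 1024], true);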
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryVerifyProcessor.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryVerifyProcessor.java
new file mode 100644
index 0000000..ed6c4e4
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiveEntryVerifyProcessor.java
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Set;
+import java.util.zip.CRC32;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.unix.Unix.Stat;
+import ch.systemsx.cisd.base.utilities.OSUtilities;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * An {@link IArchiveEntryProcessor} that verifies the archive contents against a directory on
+ * the file system.
+ * 
+ * @author Bernd Rinn
+ */
+class ArchiveEntryVerifyProcessor implements IArchiveEntryProcessor
+{
+    private final IArchiveEntryVisitor visitor;
+
+    private final File rootDirectoryOnFS;
+
+    private final Set<File> filesOnFSOrNull;
+
+    private final String rootDirectoryInArchive;
+
+    private final byte[] buffer;
+
+    private final boolean checkAttributes;
+
+    private final boolean numeric;
+
+    ArchiveEntryVerifyProcessor(IArchiveEntryVisitor visitor, File rootDirectoryOnFS,
+            Set<File> filesOnFSOrNull, byte[] buffer, boolean checkAttributes, boolean numeric)
+    {
+        this(visitor, rootDirectoryOnFS, filesOnFSOrNull, "", buffer, checkAttributes, numeric);
+    }
+
+    ArchiveEntryVerifyProcessor(IArchiveEntryVisitor visitor, File rootDirectoryOnFS,
+            Set<File> filesOnFSOrNull, String rootDirectoryInArchive, byte[] buffer,
+            boolean checkAttributes, boolean numeric)
+    {
+        this.visitor = visitor;
+        this.rootDirectoryOnFS = rootDirectoryOnFS;
+        this.filesOnFSOrNull = filesOnFSOrNull;
+        this.rootDirectoryInArchive = Utils.normalizePath(rootDirectoryInArchive);
+        this.buffer = buffer;
+        this.checkAttributes = checkAttributes;
+        this.numeric = numeric;
+    }
+
+    @Override
+    public boolean process(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException
+    {
+        final String errorMessage = checkLink(link, path, idCache);
+        visitor.visit(new ArchiveEntry(dir, path, link, idCache, errorMessage));
+        if (filesOnFSOrNull != null)
+        {
+            filesOnFSOrNull.remove(new File(rootDirectoryOnFS, path));
+        }
+        return true;
+    }
+
+    @Override
+    public void postProcessDirectory(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException, HDF5Exception
+    {
+    }
+
+    private String checkLink(LinkRecord link, String path, IdCache idCache) throws IOException
+    {
+        if (rootDirectoryInArchive.length() > 0 && path.startsWith(rootDirectoryInArchive) == false)
+        {
+            return "Object '" + path + "' does not start with path prefix '"
+                    + rootDirectoryInArchive + "'.";
+        }
+        final String strippedPath = path.substring(rootDirectoryInArchive.length());
+        final File f = new File(rootDirectoryOnFS, strippedPath);
+        if (exists(f) == false)
+        {
+            link.setVerifiedType(FileLinkType.OTHER);
+            return "Object '" + strippedPath + "' does not exist on file system.";
+        }
+        final String symbolicLinkOrNull = tryGetSymbolicLink(f);
+        if (symbolicLinkOrNull != null)
+        {
+            link.setVerifiedType(FileLinkType.SYMLINK);
+            if (link.isSymLink() == false)
+            {
+                return "Object '" + strippedPath + "' is a " + link.getLinkType()
+                        + " in archive, but a symlink on file system.";
+            }
+            if (symbolicLinkOrNull.equals(link.tryGetLinkTarget()) == false)
+            {
+                return "Symlink '" + strippedPath + "' links to '" + link.tryGetLinkTarget()
+                        + "' in archive, but to '" + symbolicLinkOrNull + "' on file system";
+            }
+        } else if (f.isDirectory())
+        {
+            link.setVerifiedType(FileLinkType.DIRECTORY);
+            if (link.isDirectory() == false)
+            {
+                if (Unix.isOperational() || OSUtilities.isWindows())
+                {
+                    return "Object '" + strippedPath + "' is a " + link.getLinkType()
+                            + " in archive, but a directory on file system.";
+                } else
+                {
+                    return "Object '" + strippedPath + "' is a " + link.getLinkType()
+                            + " in archive, but a directory on file system (error may be "
+                            + "inaccurate because Unix system calls are not available.)";
+                }
+            }
+        } else
+        {
+            link.setVerifiedType(FileLinkType.REGULAR_FILE);
+            if (link.isDirectory())
+            {
+                return "Object '" + strippedPath
+                        + "' is a directory in archive, but a file on file system.";
+
+            }
+            if (link.isSymLink())
+            {
+                if (Unix.isOperational() || OSUtilities.isWindows())
+                {
+                    return "Object '" + strippedPath
+                            + "' is a symbolic link in archive, but a file on file system.";
+                } else
+                {
+                    return "Object '"
+                            + strippedPath
+                            + "' is a symbolic link in archive, but a file on file system "
+                            + "(error may be inaccurate because Unix system calls are not available.).";
+                }
+
+            }
+            final long size = f.length();
+            final int crc32 = calcCRC32Filesystem(f, buffer);
+            link.setFileVerification(size, crc32, f.lastModified() / Utils.MILLIS_PER_SECOND);
+            if (link.getSize() != size)
+            {
+                return "File '" + f.getAbsolutePath() + "' failed size test, expected: "
+                        + link.getSize() + ", found: " + size;
+            }
+            if (link.getSize() > 0 && link.getCrc32() == 0)
+            {
+                return "File '" + f.getAbsolutePath() + "': cannot verify (missing CRC checksum).";
+            }
+            if (link.getCrc32() != crc32)
+            {
+                return "File '" + f.getAbsolutePath() + "' failed CRC checksum test, expected: "
+                        + Utils.crc32ToString(link.getCrc32()) + ", found: "
+                        + Utils.crc32ToString(crc32) + ".";
+            }
+        }
+        return checkAttributes ? doFilesystemAttributeCheck(f, idCache, link, numeric) : null;
+    }
+
+    private static boolean exists(final File f)
+    {
+        if (Unix.isOperational())
+        {
+            return Unix.tryGetLinkInfo(f.getPath(), false) != null;
+        } else
+        {
+            return f.exists();
+        }
+    }
+
+    private static String tryGetSymbolicLink(File f)
+    {
+        if (Unix.isOperational())
+        {
+            return Unix.getLinkInfo(f.getPath()).tryGetSymbolicLink();
+        } else
+        {
+            return null;
+        }
+    }
+
+    private static int calcCRC32Filesystem(File source, byte[] buffer) throws IOException
+    {
+        final InputStream input = FileUtils.openInputStream(source);
+        final CRC32 crc32 = new CRC32();
+        try
+        {
+            int n = 0;
+            while (-1 != (n = input.read(buffer)))
+            {
+                crc32.update(buffer, 0, n);
+            }
+        } finally
+        {
+            IOUtils.closeQuietly(input);
+        }
+        return (int) crc32.getValue();
+    }
+
+    private static String doFilesystemAttributeCheck(File file, IdCache idCache, LinkRecord link,
+            boolean numeric)
+    {
+        final StringBuilder sb = new StringBuilder();
+        if (link.hasLastModified())
+        {
+            final long expectedLastModifiedMillis =
+                    link.getLastModified() * Utils.MILLIS_PER_SECOND;
+            final long foundLastModifiedMillis = file.lastModified();
+            if (expectedLastModifiedMillis != foundLastModifiedMillis)
+            {
+                sb.append(String.format("'last modified time': (expected: "
+                        + "%1$tY-%1$tm-%1$td %1$tH:%1$tM:%1$tS, found: "
+                        + "%2$tY-%2$tm-%2$td %2$tH:%2$tM:%2$tS) ", expectedLastModifiedMillis,
+                        foundLastModifiedMillis));
+            }
+        }
+        if (link.hasUnixPermissions() && Unix.isOperational())
+        {
+            final Stat info = Unix.getLinkInfo(file.getPath(), false);
+            if (link.getPermissions() != info.getPermissions()
+                    || link.getLinkType() != info.getLinkType())
+            {
+                sb.append(String.format("'access permissions': (expected: %s, found: %s) ", Utils
+                        .permissionsToString(link.getPermissions(), link.isDirectory(), numeric),
+                        Utils.permissionsToString(info.getPermissions(),
+                                info.getLinkType() == FileLinkType.DIRECTORY, numeric)));
+            }
+            if (link.getUid() != info.getUid() || link.getGid() != info.getGid())
+            {
+                sb.append(String.format("'ownerwhip': (expected: %s:%s, found: %s:%s",
+                        idCache.getUser(link, numeric), idCache.getGroup(link, numeric),
+                        idCache.getUser(info, numeric), idCache.getGroup(info, numeric)));
+            }
+        }
+        if (sb.length() == 0)
+        {
+            return null;
+        } else
+        {
+            return "File '" + file.getAbsolutePath() + "': " + sb.toString();
+        }
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, String detailedMsg)
+    {
+        return new VerifyArchiveException(objectPath, detailedMsg);
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, HDF5Exception cause)
+    {
+        return new VerifyArchiveException(objectPath, cause);
+    }
+
+    @Override
+    public ArchiverException createException(String objectPath, RuntimeException cause)
+    {
+        return new VerifyArchiveException(objectPath, cause);
+    }
+
+    @Override
+    public ArchiverException createException(File file, IOException cause)
+    {
+        return new VerifyArchiveException(file, cause);
+    }
+
+}
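
Note: the filesystem half of the verification is just a streaming CRC32. The same logic as calcCRC32Filesystem above, written as a stand-alone sketch using only java.io and java.util.zip (try-with-resources takes the place of the commons-io closeQuietly() helper):

    // Equivalent stand-alone helper; behavior matches calcCRC32Filesystem.
    static int crc32OfFile(File source, byte[] buffer) throws IOException
    {
        final CRC32 crc32 = new CRC32();
        try (InputStream input = new FileInputStream(source))
        {
            int n;
            while ((n = input.read(buffer)) != -1)
            {
                crc32.update(buffer, 0, n);
            }
        }
        return (int) crc32.getValue();
    }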
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiverException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiverException.java
new file mode 100644
index 0000000..4d2ecbc
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchiverException.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+
+/**
+ * Base class of exceptions from the archiver.
+ * 
+ * @author Bernd Rinn
+ */
+public abstract class ArchiverException extends RuntimeException
+{
+    private final String fileOrObjectPath;
+
+    private static final long serialVersionUID = 1L;
+
+    protected ArchiverException(String objectPath, String operationName, String detailedMsg)
+    {
+        super("Error " + operationName + " object '" + objectPath + "': " + detailedMsg, null);
+        this.fileOrObjectPath = objectPath;
+    }
+
+    protected ArchiverException(String objectPath, String operationName, HDF5Exception cause)
+    {
+        super("Error " + operationName + " object '" + objectPath + "' ["
+                + cause.getClass().getSimpleName() + "]: " + cause.getMessage(), cause);
+        this.fileOrObjectPath = objectPath;
+    }
+
+    protected ArchiverException(String objectPath, String operationName, RuntimeException cause)
+    {
+        super("Error " + operationName + " object '" + objectPath + "' ["
+                + cause.getClass().getSimpleName() + "]: " + cause.getMessage(), cause);
+        this.fileOrObjectPath = objectPath;
+    }
+
+    protected ArchiverException(File file, String operationName, IOExceptionUnchecked cause)
+    {
+        this(file, operationName, cause.getCause());
+    }
+
+    protected ArchiverException(File file, String operationName, IOException cause)
+    {
+        super("Error " + operationName + " file '" + file + "' [IO]: " + cause.getMessage(), cause);
+        this.fileOrObjectPath = file.getAbsolutePath();
+    }
+
+    protected ArchiverException(String filePath, String operationName, IOException cause)
+    {
+        super("Error " + operationName + " on reading input stream for object  '" + filePath
+                + "' [IO]: " + cause.getMessage(), cause);
+        this.fileOrObjectPath = filePath;
+    }
+
+    public final String getFileOrObjectPath()
+    {
+        return fileOrObjectPath;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingException.java
new file mode 100644
index 0000000..9235302
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingException.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+/**
+ * Exception thrown when archiving a file / directory fails.
+ *
+ * @author Bernd Rinn
+ */
+public class ArchivingException extends ArchiverException
+{
+    private static final long serialVersionUID = 1L;
+    
+    private static final String OPERATION_NAME = "archiving";
+    
+    public ArchivingException(String msg)
+    {
+        super("GENERAL", OPERATION_NAME, msg);
+    }
+    
+    public ArchivingException(String objectPath, HDF5Exception cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public ArchivingException(File file, IOException cause)
+    {
+        super(file, OPERATION_NAME, cause);
+    }
+
+    public ArchivingException(String filePath, IOException cause)
+    {
+        super(filePath, OPERATION_NAME, cause);
+    }
+
+    public ArchivingException(String objectPath, String detailedMsg)
+    {
+        super(objectPath, OPERATION_NAME, detailedMsg);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingStrategy.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingStrategy.java
new file mode 100644
index 0000000..5da78fe
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingStrategy.java
@@ -0,0 +1,390 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+
+/**
+ * A class that represents a strategy for excluding files from archiving / extracting and for
+ * compressing files in the archive.
+ * 
+ * @author Bernd Rinn
+ */
+public class ArchivingStrategy
+{
+
+    public enum CompressionStrategy
+    {
+        COMPRESS_NOTHING, COMPRESS_ALL, USE_BLACK_WHITE_LISTS
+    }
+
+    private List<Pattern> fileWhiteListOrNull;
+
+    private List<Pattern> fileBlackListOrNull;
+
+    private List<Pattern> dirWhiteListOrNull;
+
+    private List<Pattern> dirBlackListOrNull;
+
+    private List<Pattern> compressionWhiteListOrNull;
+
+    private List<Pattern> compressionBlackListOrNull;
+
+    private CompressionStrategy compressionStrategy;
+
+    private boolean sealed;
+
+    /**
+     * The default strategy: include everything, compress all files except those
+     * known to be already compressed.
+     */
+    public static final ArchivingStrategy DEFAULT = new ArchivingStrategy()
+            .addToCompressionBlackList(".*\\.zip").addToCompressionBlackList(".*\\.gz")
+            .addToCompressionBlackList(".*\\.bz2").seal();
+    
+    /**
+     * An alias for the default strategy (kept for backward compatibility).
+     */
+    public static final ArchivingStrategy DEFAULT_WITH_COMPRESSION = DEFAULT;
+
+    /**
+     * The default strategy without compression: include everything, compress nothing.
+     */
+    public static final ArchivingStrategy DEFAULT_NO_COMPRESSION = new ArchivingStrategy().seal();
+
+    public ArchivingStrategy()
+    {
+        this.compressionStrategy = CompressionStrategy.COMPRESS_NOTHING;
+    }
+    
+    public ArchivingStrategy(ArchivingStrategy template)
+    {
+        this.fileWhiteListOrNull = template.fileWhiteListOrNull;
+        this.fileBlackListOrNull = template.fileBlackListOrNull;
+        this.dirWhiteListOrNull = template.dirWhiteListOrNull;
+        this.dirBlackListOrNull = template.dirBlackListOrNull;
+        this.compressionStrategy = template.compressionStrategy;
+        this.compressionWhiteListOrNull = template.compressionWhiteListOrNull;
+        this.compressionBlackListOrNull = template.compressionBlackListOrNull;
+    }
+    
+    private List<Pattern> getOrCreateFileWhiteList()
+    {
+        if (fileWhiteListOrNull == null)
+        {
+            fileWhiteListOrNull = new ArrayList<Pattern>();
+        }
+        return fileWhiteListOrNull;
+    }
+
+    private List<Pattern> getOrCreateFileBlackList()
+    {
+        if (fileBlackListOrNull == null)
+        {
+            fileBlackListOrNull = new ArrayList<Pattern>();
+        }
+        return fileBlackListOrNull;
+    }
+
+    private List<Pattern> getOrCreateDirWhiteList()
+    {
+        if (dirWhiteListOrNull == null)
+        {
+            dirWhiteListOrNull = new ArrayList<Pattern>();
+        }
+        return dirWhiteListOrNull;
+    }
+
+    private List<Pattern> getOrCreateDirBlackList()
+    {
+        if (dirBlackListOrNull == null)
+        {
+            dirBlackListOrNull = new ArrayList<Pattern>();
+        }
+        return dirBlackListOrNull;
+    }
+
+    private List<Pattern> getOrCreateCompressionWhiteList()
+    {
+        if (compressionWhiteListOrNull == null)
+        {
+            compressionWhiteListOrNull = new ArrayList<Pattern>();
+        }
+        return compressionWhiteListOrNull;
+    }
+
+    private List<Pattern> getOrCreateCompressionBlackList()
+    {
+        if (compressionBlackListOrNull == null)
+        {
+            compressionBlackListOrNull = new ArrayList<Pattern>();
+        }
+        return compressionBlackListOrNull;
+    }
+
+    private void checkSealed()
+    {
+        if (sealed)
+        {
+            throw new IllegalStateException("ArchivingStrategy is sealed.");
+        }
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the whitelist of files to include in archiving.
+     */
+    public ArchivingStrategy addToFileWhiteList(Pattern pattern)
+    {
+        checkSealed();
+        getOrCreateFileWhiteList().add(pattern);
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the whitelist of files to include in archiving.
+     */
+    public ArchivingStrategy addToFileWhiteList(String pattern)
+    {
+        checkSealed();
+        getOrCreateFileWhiteList().add(Pattern.compile(pattern));
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the blacklist of files to exclude from archiving.
+     */
+    public ArchivingStrategy addToFileBlackList(Pattern pattern)
+    {
+        checkSealed();
+        getOrCreateFileBlackList().add(pattern);
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the blacklist of files to exclude from archiving.
+     */
+    public ArchivingStrategy addToFileBlackList(String pattern)
+    {
+        checkSealed();
+        getOrCreateFileBlackList().add(Pattern.compile(pattern));
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the whitelist of directories to include in archiving.
+     */
+    public ArchivingStrategy addToDirWhiteList(Pattern pattern)
+    {
+        checkSealed();
+        getOrCreateDirWhiteList().add(pattern);
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the whitelist of directories to include in archiving.
+     */
+    public ArchivingStrategy addToDirWhiteList(String pattern)
+    {
+        checkSealed();
+        getOrCreateDirWhiteList().add(Pattern.compile(pattern));
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the blacklist of directories to exclude from archiving.
+     */
+    public ArchivingStrategy addToDirBlackList(Pattern pattern)
+    {
+        checkSealed();
+        getOrCreateDirBlackList().add(pattern);
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the blacklist of directories to exclude from archiving.
+     */
+    public ArchivingStrategy addToDirBlackList(String pattern)
+    {
+        checkSealed();
+        getOrCreateDirBlackList().add(Pattern.compile(pattern));
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the whitelist of files to store compressed in archive.
+     */
+    public ArchivingStrategy addToCompressionWhiteList(Pattern pattern)
+    {
+        checkSealed();
+        getOrCreateCompressionWhiteList().add(pattern);
+        compressionStrategy = CompressionStrategy.USE_BLACK_WHITE_LISTS;
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the whitelist of files to store compressed in archive.
+     */
+    public ArchivingStrategy addToCompressionWhiteList(String pattern)
+    {
+        checkSealed();
+        getOrCreateCompressionWhiteList().add(Pattern.compile(pattern));
+        compressionStrategy = CompressionStrategy.USE_BLACK_WHITE_LISTS;
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the blacklist of files to store compressed in archive.
+     */
+    public ArchivingStrategy addToCompressionBlackList(Pattern pattern)
+    {
+        checkSealed();
+        getOrCreateCompressionBlackList().add(pattern);
+        compressionStrategy = CompressionStrategy.USE_BLACK_WHITE_LISTS;
+        return this;
+    }
+
+    /**
+     * Add the given <var>pattern</var> to the blacklist of files to store compressed in archive.
+     */
+    public ArchivingStrategy addToCompressionBlackList(String pattern)
+    {
+        checkSealed();
+        getOrCreateCompressionBlackList().add(Pattern.compile(pattern));
+        compressionStrategy = CompressionStrategy.USE_BLACK_WHITE_LISTS;
+        return this;
+    }
+
+    /**
+     * Seal the strategy. After sealing, any attempt to modify the strategy will throw an
+     * {@link IllegalStateException}.
+     */
+    public ArchivingStrategy seal()
+    {
+        this.sealed = true;
+        return this;
+    }
+
+    /**
+     * Returns <code>true</code> if this strategy is sealed.
+     * 
+     * @see #seal()
+     */
+    public boolean isSealed()
+    {
+        return sealed;
+    }
+
+    /**
+     * Store all files compressed in archive.
+     */
+    public ArchivingStrategy compressAll()
+    {
+        checkSealed();
+        this.compressionStrategy = CompressionStrategy.COMPRESS_ALL;
+        return this;
+    }
+
+    /**
+     * Sets whether all files should be stored compressed in the archive (<code>true</code>)
+     * or not (<code>false</code>).
+     */
+    public ArchivingStrategy compressAll(boolean compress)
+    {
+        checkSealed();
+        this.compressionStrategy =
+                compress ? CompressionStrategy.COMPRESS_ALL : CompressionStrategy.COMPRESS_NOTHING;
+        return this;
+    }
+
+    /**
+     * @deprecated Use {@link #compressAll(boolean)} instead.
+     */
+    @Deprecated
+    public final void setCompress(boolean compress)
+    {
+        checkSealed();
+        this.compressionStrategy =
+                compress ? CompressionStrategy.COMPRESS_ALL : CompressionStrategy.COMPRESS_NOTHING;
+    }
+
+    boolean doExclude(String path, boolean isDirectory)
+    {
+        if (isDirectory)
+        {
+            return match(dirBlackListOrNull, dirWhiteListOrNull, path) == false;
+        } else
+        {
+            return match(fileBlackListOrNull, fileWhiteListOrNull, path) == false;
+        }
+    }
+
+    HDF5GenericStorageFeatures getStorageFeatureForPath(String path)
+    {
+        return doCompress(path) ? HDF5GenericStorageFeatures.GENERIC_DEFLATE
+                : HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION;
+    }
+
+    public boolean doCompress(String path)
+    {
+        switch (compressionStrategy)
+        {
+            case COMPRESS_NOTHING:
+                return false;
+            case COMPRESS_ALL:
+                return true;
+            default:
+                return match(compressionBlackListOrNull, compressionWhiteListOrNull, path);
+        }
+    }
+
+    public CompressionStrategy getCompressionStrategy()
+    {
+        return compressionStrategy;
+    }
+
+    private static boolean match(Iterable<Pattern> blackListOrNull,
+            Iterable<Pattern> whiteListOrNull, String path)
+    {
+        if (blackListOrNull != null)
+        {
+            for (Pattern p : blackListOrNull)
+            {
+                if (p.matcher(path).matches())
+                {
+                    return false;
+                }
+            }
+        }
+        if (whiteListOrNull == null)
+        {
+            return true;
+        }
+        for (Pattern p : whiteListOrNull)
+        {
+            if (p.matcher(path).matches())
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}
\ No newline at end of file
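
Note: the fluent API above composes directly. For example, a strategy that excludes editor backup files and compresses only *.txt entries; the pattern strings are illustrative, all calls are defined above:

    // Illustrative patterns; patterns must match the full path.
    final ArchivingStrategy strategy = new ArchivingStrategy()
            .addToFileBlackList(".*~")              // skip backup files
            .addToCompressionWhiteList(".*\\.txt")  // deflate only *.txt
            .seal();                                // freeze the strategy

    // doCompress() consults the compression black/white lists set above:
    strategy.doCompress("/notes/readme.txt");  // true
    strategy.doCompress("/data/image.png");    // false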
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/DeleteFromArchiveException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/DeleteFromArchiveException.java
new file mode 100644
index 0000000..b2ee886
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/DeleteFromArchiveException.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+/**
+ * Exception thrown when deleting a file / directory in an archive fails.
+ *
+ * @author Bernd Rinn
+ */
+public class DeleteFromArchiveException extends ArchiverException
+{
+
+    private static final long serialVersionUID = 1L;
+    
+    private static final String OPERATION_NAME = "deleting";
+    
+    public DeleteFromArchiveException(String objectPath, HDF5Exception cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public DeleteFromArchiveException(File file, IOException cause)
+    {
+        super(file, OPERATION_NAME, cause);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndex.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndex.java
new file mode 100644
index 0000000..e954e7d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndex.java
@@ -0,0 +1,534 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundMemberMapping.mapping;
+
+import java.io.File;
+import java.io.Flushable;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.CRC32;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.hdf5.CharacterEncoding;
+import ch.systemsx.cisd.hdf5.HDF5CompoundMemberInformation;
+import ch.systemsx.cisd.hdf5.HDF5CompoundMemberMapping;
+import ch.systemsx.cisd.hdf5.HDF5CompoundType;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.HDF5EnumerationType;
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+import ch.systemsx.cisd.hdf5.HDF5LinkInformation;
+import ch.systemsx.cisd.hdf5.IHDF5CompoundInformationRetriever;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+import ch.systemsx.cisd.hdf5.StringUtils;
+
+/**
+ * Memory representation of the directory index stored in an HDF5 archive.
+ * <p>
+ * Can operate in read-only or read-write mode. The mode is automatically determined by the
+ * <var>hdf5Reader</var> provided the constructor: If this is an instance of {@link IHDF5Writer},
+ * the directory index will be read-write, otherwise read-only.
+ * 
+ * @author Bernd Rinn
+ */
+class DirectoryIndex implements IDirectoryIndex
+{
+    private static final String CRC32_ATTRIBUTE_NAME = "CRC32";
+
+    private final IHDF5Reader hdf5Reader;
+
+    private final IHDF5Writer hdf5WriterOrNull;
+
+    private final String groupPath;
+
+    private final IErrorStrategy errorStrategy;
+
+    private final Set<Flushable> flushables;
+
+    /**
+     * The list of all links in this directory.
+     * <p>
+     * The order is to have all directories (in alphabetical order) before all files (in
+     * alphabetical order).
+     */
+    private LinkStore links;
+
+    private boolean readLinkTargets;
+
+    private boolean dirty;
+
+    /**
+     * Converts an array of {@link File}s into a list of {@link LinkRecord}s. The list is optimized
+     * for iterating through it and removing single entries during the iteration.
+     * <p>
+     * Note that the length of the list will always be the same as the length of <var>files</var>.
+     * If some <code>stat</code> call failed on an entry, this entry will be <code>null</code>, so
+     * code using the returned list of this method needs to be prepared that this list may contain
+     * <code>null</code> values!
+     * 
+     * @return A list of {@link LinkRecord}s in the same order as <var>files</var>.
+     */
+    public static List<LinkRecord> convertFilesToLinks(File[] files, IErrorStrategy errorStrategy)
+    {
+        final List<LinkRecord> list = new LinkedList<LinkRecord>();
+        for (File file : files)
+        {
+            list.add(LinkRecord.tryCreate(file, errorStrategy));
+        }
+        return list;
+    }
+
+    private static HDF5EnumerationType getHDF5LinkTypeEnumeration(IHDF5Reader reader)
+    {
+        return reader.enumeration().getType("linkType", getFileLinkTypeValues());
+    }
+
+    private static HDF5CompoundType<LinkRecord> getHDF5LinkCompoundType(IHDF5Reader reader)
+    {
+        return getHDF5LinkCompoundType(reader, getHDF5LinkTypeEnumeration(reader));
+    }
+
+    private static HDF5CompoundType<LinkRecord> getHDF5LinkCompoundType(IHDF5Reader reader,
+            HDF5EnumerationType hdf5LinkTypeEnumeration)
+    {
+        return reader.compound().getType(LinkRecord.class, getMapping(hdf5LinkTypeEnumeration));
+    }
+
+    private static String[] getFileLinkTypeValues()
+    {
+        final FileLinkType[] fileLinkTypes = FileLinkType.values();
+        final String[] values = new String[fileLinkTypes.length];
+        for (int i = 0; i < values.length; ++i)
+        {
+            values[i] = fileLinkTypes[i].name();
+        }
+        return values;
+    }
+
+    private static HDF5CompoundMemberMapping[] getMapping(HDF5EnumerationType linkEnumerationType)
+    {
+        return new HDF5CompoundMemberMapping[]
+            { mapping("linkNameLength"), mapping("linkType").enumType(linkEnumerationType),
+                    mapping("size"), mapping("lastModified"), mapping("uid"), mapping("gid"),
+                    mapping("permissions"), mapping("checksum").fieldName("crc32") };
+    }
+
+    /**
+     * Creates a new directory (group) index. Note that <var>hdf5Reader</var> needs to be an
+     * instance of {@link IHDF5Writer} if you intend to write the index to the archive.
+     */
+    DirectoryIndex(IHDF5Reader hdf5Reader, String groupPath, IErrorStrategy errorStrategy,
+            boolean readLinkTargets)
+    {
+        assert hdf5Reader != null;
+        assert groupPath != null;
+
+        this.hdf5Reader = hdf5Reader;
+        this.hdf5WriterOrNull =
+                (hdf5Reader instanceof IHDF5Writer) ? (IHDF5Writer) hdf5Reader : null;
+        if (hdf5WriterOrNull != null)
+        {
+            hdf5WriterOrNull.file().addFlushable(this);
+        }
+        this.groupPath = (groupPath.length() == 0) ? "/" : groupPath;
+        this.errorStrategy = errorStrategy;
+        this.flushables = new LinkedHashSet<Flushable>();
+        readIndex(readLinkTargets);
+    }
+
+    @Override
+    public boolean addFlushable(Flushable flushable)
+    {
+        return flushables.add(flushable);
+    }
+
+    @Override
+    public boolean removeFlushable(Flushable flushable)
+    {
+        return flushables.remove(flushable);
+    }
+
+    void flushExternals()
+    {
+        for (Flushable f : flushables)
+        {
+            try
+            {
+                f.flush();
+            } catch (Exception ex)
+            {
+                System.err.println("External flushable throws an exception:");
+                ex.printStackTrace();
+            }
+        }
+    }
+
+    /**
+     * Amend the index with link targets. If the links targets have already been read, this method
+     * is a noop.
+     */
+    @Override
+    public void amendLinkTargets()
+    {
+        if (readLinkTargets)
+        {
+            return;
+        }
+        links.amendLinkTargets(hdf5Reader, groupPath);
+        readLinkTargets = true;
+    }
+
+    private String getIndexDataSetName()
+    {
+        return groupPath + "/" + hdf5Reader.object().toHouseKeepingPath("INDEX");
+    }
+
+    private String getIndexNamesDataSetName()
+    {
+        return groupPath + "/" + hdf5Reader.object().toHouseKeepingPath("INDEXNAMES");
+    }
+
+    /**
+     * (Re-)Reads the directory index from the archive represented by <var>hdf5Reader</var>.
+     */
+    private void readIndex(boolean withLinkTargets)
+    {
+        boolean readingH5ArIndexWorked = false;
+        try
+        {
+            if (hdf5Reader.exists(getIndexDataSetName())
+                    && hdf5Reader.exists(getIndexNamesDataSetName()))
+            {
+                final HDF5CompoundType<LinkRecord> linkCompoundType =
+                        getHDF5LinkCompoundType(hdf5Reader);
+                final CRC32 crc32Digester = new CRC32();
+                final String indexDataSetName = getIndexDataSetName();
+                final HDF5CompoundMemberInformation[] info =
+                        linkCompoundType.getCompoundMemberInformation(DataTypeInfoOptions.MINIMAL);
+                final LinkRecord[] work =
+                        hdf5Reader.compound().readArray(indexDataSetName, linkCompoundType,
+                                new IHDF5CompoundInformationRetriever.IByteArrayInspector()
+                                    {
+                                        @Override
+                                        public void inspect(byte[] byteArray)
+                                        {
+                                            updateCRC32(byteArray, linkCompoundType, info,
+                                                    crc32Digester);
+                                        }
+                                    });
+                int crc32 = (int) crc32Digester.getValue();
+                int crc32Stored =
+                        hdf5Reader.int32().getAttr(indexDataSetName, CRC32_ATTRIBUTE_NAME);
+                if (crc32 != crc32Stored)
+                {
+                    if (calcLegacy_14_12_0_Checksum(indexDataSetName, linkCompoundType) != crc32Stored)
+                    {
+                        throw new ListArchiveException(groupPath,
+                                "CRC checksum mismatch on index (links). Expected: "
+                                        + Utils.crc32ToString(crc32Stored) + ", found: "
+                                        + Utils.crc32ToString(crc32));
+                    }
+                }
+                final String indexNamesDataSetName = getIndexNamesDataSetName();
+                final String concatenatedNames = hdf5Reader.readString(indexNamesDataSetName);
+                crc32 = calcCrc32(concatenatedNames);
+                crc32Stored =
+                        hdf5Reader.int32().getAttr(indexNamesDataSetName, CRC32_ATTRIBUTE_NAME);
+                if (crc32 != crc32Stored)
+                {
+                    throw new ListArchiveException(groupPath,
+                            "CRC checksum mismatch on index (names). Expected: "
+                                    + Utils.crc32ToString(crc32Stored) + ", found: "
+                                    + Utils.crc32ToString(crc32));
+                }
+                initLinks(work, concatenatedNames, withLinkTargets);
+                links = new LinkStore(work);
+                readingH5ArIndexWorked = true;
+            }
+        } catch (RuntimeException ex)
+        {
+            errorStrategy.dealWithError(new ListArchiveException(groupPath, ex));
+        }
+        // Fallback: couldn't read the index, reconstructing it from the group information.
+        if (readingH5ArIndexWorked == false)
+        {
+            if (hdf5Reader.object().isGroup(groupPath, false))
+            {
+                final List<HDF5LinkInformation> hdf5LinkInfos =
+                        hdf5Reader.object().getGroupMemberInformation(groupPath, withLinkTargets);
+                final LinkRecord[] work = new LinkRecord[hdf5LinkInfos.size()];
+                int idx = 0;
+                for (HDF5LinkInformation linfo : hdf5LinkInfos)
+                {
+                    final long size =
+                            linfo.isDataSet() ? hdf5Reader.object().getSize(linfo.getPath())
+                                    : Utils.UNKNOWN;
+                    work[idx++] = new LinkRecord(linfo, size);
+                }
+                Arrays.sort(work);
+                links = new LinkStore(work);
+            } else
+            {
+                links = new LinkStore();
+            }
+        }
+        readLinkTargets = withLinkTargets;
+        dirty = false;
+    }
+
+    private int calcLegacy_14_12_0_Checksum(final String indexDataSetName,
+            final HDF5CompoundType<LinkRecord> linkCompoundType)
+    {
+        final CRC32 crc32Digester = new CRC32();
+        hdf5Reader.compound().readArray(indexDataSetName, linkCompoundType,
+                new IHDF5CompoundInformationRetriever.IByteArrayInspector()
+                    {
+                        @Override
+                        public void inspect(byte[] byteArray)
+                        {
+                            crc32Digester.update(byteArray);
+                        }
+                    });
+        return (int) crc32Digester.getValue();
+    }
+
+    private void initLinks(final LinkRecord[] work, final String concatenatedNames,
+            boolean withLinkTargets)
+    {
+        int namePos = 0;
+        for (LinkRecord link : work)
+        {
+            namePos =
+                    link.initAfterReading(concatenatedNames, namePos, hdf5Reader, groupPath,
+                            withLinkTargets);
+        }
+    }
+
+    @Override
+    public boolean exists(String name)
+    {
+        return links.exists(name);
+    }
+
+    @Override
+    public boolean isDirectory(String name)
+    {
+        final LinkRecord link = links.tryGetLink(name);
+        return (link != null) && link.isDirectory();
+    }
+
+    /**
+     * Returns the link with {@link LinkRecord#getLinkName()} equal to <var>name</var>, or
+     * <code>null</code>, if there is no such link in the directory index.
+     */
+    @Override
+    public LinkRecord tryGetLink(String name)
+    {
+        final LinkRecord linkOrNull = links.tryGetLink(name);
+        if (linkOrNull != null)
+        {
+            linkOrNull.resetVerification();
+        }
+        return linkOrNull;
+    }
+
+    /**
+     * Returns <code>true</code>, if this class has link targets read.
+     */
+    @Override
+    public boolean hasLinkTargets()
+    {
+        return readLinkTargets;
+    }
+
+    //
+    // Iterable
+    //
+
+    @Override
+    public Iterator<LinkRecord> iterator()
+    {
+        return links.iterator();
+    }
+
+    //
+    // Writing methods
+    //
+
+    /**
+     * Writes the directory index to the archive represented by <var>hdf5Writer</var>.
+     * <p>
+     * Works on the list data structure.
+     */
+    @Override
+    public void flush()
+    {
+        flushExternals();
+        if (dirty == false)
+        {
+            return;
+        }
+        ensureWriteMode();
+        try
+        {
+            final StringBuilder concatenatedNames = new StringBuilder();
+            for (LinkRecord link : links)
+            {
+                link.prepareForWriting(concatenatedNames);
+            }
+            final String indexNamesDataSetName = getIndexNamesDataSetName();
+            final String concatenatedNamesStr = concatenatedNames.toString();
+            hdf5WriterOrNull.string().write(indexNamesDataSetName, concatenatedNamesStr,
+                    HDF5GenericStorageFeatures.GENERIC_DEFLATE);
+            hdf5WriterOrNull.int32().setAttr(indexNamesDataSetName, CRC32_ATTRIBUTE_NAME,
+                    calcCrc32(concatenatedNamesStr));
+            final String indexDataSetName = getIndexDataSetName();
+            final CRC32 crc32Digester = new CRC32();
+            final HDF5CompoundType<LinkRecord> linkCompoundType =
+                    getHDF5LinkCompoundType(hdf5WriterOrNull);
+            final HDF5CompoundMemberInformation[] info =
+                    linkCompoundType.getCompoundMemberInformation(DataTypeInfoOptions.MINIMAL);
+            hdf5WriterOrNull.compound().writeArray(indexDataSetName, linkCompoundType,
+                    links.getLinkArray(), HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION,
+                    new IHDF5CompoundInformationRetriever.IByteArrayInspector()
+                        {
+                            @Override
+                            public void inspect(byte[] byteArray)
+                            {
+                                updateCRC32(byteArray, linkCompoundType, info, crc32Digester);
+                            }
+                        });
+            hdf5WriterOrNull.int32().setAttr(indexDataSetName, CRC32_ATTRIBUTE_NAME,
+                    (int) crc32Digester.getValue());
+        } catch (HDF5Exception ex)
+        {
+            errorStrategy.dealWithError(new ListArchiveException(groupPath, ex));
+        }
+        dirty = false;
+    }
+
+    /**
+     * Add <var>entries</var> to the index. Any link that already exists in the index will be
+     * replaced.
+     */
+    @Override
+    public void updateIndex(LinkRecord[] entries)
+    {
+        ensureWriteMode();
+        links.update(entries);
+        dirty = true;
+    }
+
+    /**
+     * Add <var>entries</var> to the index. Any link that already exists in the index will be
+     * replaced.
+     */
+    @Override
+    public void updateIndex(Collection<LinkRecord> entries)
+    {
+        ensureWriteMode();
+        links.update(entries);
+        dirty = true;
+    }
+
+    /**
+     * Add <var>entry</var> to the index. If it already exists in the index, it will be replaced.
+     */
+    @Override
+    public void updateIndex(LinkRecord entry)
+    {
+        ensureWriteMode();
+        links.update(entry);
+        dirty = true;
+    }
+
+    /**
+     * Removes <var>linkName</var> from the index, if it is in.
+     * 
+     * @return <code>true</code>, if <var>linkName</var> was removed.
+     */
+    @Override
+    public boolean remove(String linkName)
+    {
+        ensureWriteMode();
+        final boolean storeChanged = links.remove(linkName);
+        dirty |= storeChanged;
+        return storeChanged;
+    }
+
+    private void ensureWriteMode()
+    {
+        if (hdf5WriterOrNull == null)
+        {
+            throw new IllegalStateException("Cannot write index in read-only mode.");
+        }
+    }
+
+    private int calcCrc32(String names)
+    {
+        final CRC32 crc32 = new CRC32();
+        crc32.update(StringUtils.toBytes0Term(names, names.length(), CharacterEncoding.UTF8));
+        return (int) crc32.getValue();
+    }
+
+    private void updateCRC32(byte[] byteArray, final HDF5CompoundType<LinkRecord> linkCompoundType,
+            final HDF5CompoundMemberInformation[] info, final CRC32 crc32Digester)
+    {
+        final int numberOfRecords = byteArray.length / linkCompoundType.getRecordSizeInMemory();
+        for (int i = 0; i < numberOfRecords; ++i)
+        {
+            final int recordOfs = i * linkCompoundType.getRecordSizeInMemory();
+            for (int j = 0; j < info.length; ++j)
+            {
+                final int ofs = recordOfs + info[j].getOffsetInMemory();
+                final int diskOfs = info[j].getOffsetOnDisk();
+                final int nextDiskOfs =
+                        (j + 1 < info.length) ? info[j + 1].getOffsetOnDisk() : linkCompoundType
+                                .getRecordSizeOnDisk();
+                final int sizeOnDisk = nextDiskOfs - diskOfs;
+                crc32Digester.update(byteArray, ofs, sizeOnDisk);
+            }
+        }
+    }
+
+    //
+    // Closeable
+    //
+
+    @Override
+    public void close() throws IOExceptionUnchecked
+    {
+        flush();
+        if (hdf5WriterOrNull != null)
+        {
+            hdf5WriterOrNull.file().removeFlushable(this);
+        }
+        flushables.clear();
+    }
+
+}
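
Note: the updateCRC32 helper above is the subtle part of the index format. It checksums only the bytes that exist on disk, skipping the alignment padding that the in-memory compound layout may contain. A reduced sketch of that walk, with hypothetical offset arrays standing in for the HDF5CompoundMemberInformation metadata used in the real code:

    // Sketch of the padding-skipping checksum; memOfs/diskOfs/recSizeMem/
    // recSizeDisk are hypothetical stand-ins for the compound-type metadata.
    static int crcOnDiskLayout(byte[] records, int recSizeMem, int recSizeDisk,
            int[] memOfs, int[] diskOfs)
    {
        final CRC32 crc = new CRC32();
        final int numRecords = records.length / recSizeMem;
        for (int i = 0; i < numRecords; ++i)
        {
            final int recBase = i * recSizeMem;
            for (int j = 0; j < memOfs.length; ++j)
            {
                // Member j occupies [diskOfs[j], nextDiskOfs) on disk.
                final int nextDiskOfs = (j + 1 < diskOfs.length) ? diskOfs[j + 1] : recSizeDisk;
                crc.update(records, recBase + memOfs[j], nextDiskOfs - diskOfs[j]);
            }
        }
        return (int) crc.getValue();
    }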
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexProvider.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexProvider.java
new file mode 100644
index 0000000..c67f349
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexProvider.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * A provider for {@link DirectoryIndex} objects.
+ * 
+ * @author Bernd Rinn
+ */
+class DirectoryIndexProvider implements IDirectoryIndexProvider
+{
+    private final Map<String, DirectoryIndex> cacheMap = new HashMap<String, DirectoryIndex>();
+
+    private final IHDF5Reader reader;
+
+    private final IErrorStrategy errorStrategy;
+
+    DirectoryIndexProvider(IHDF5Reader reader, IErrorStrategy errorStrategy)
+    {
+        this.reader = reader;
+        this.errorStrategy = errorStrategy;
+    }
+
+    @Override
+    public synchronized IDirectoryIndex get(String normalizedGroupPath, boolean withLinkTargets)
+    {
+        final String nonEmptyGroupPath =
+                (normalizedGroupPath.length() == 0) ? "/" : normalizedGroupPath;
+        DirectoryIndex index = cacheMap.get(nonEmptyGroupPath);
+        if (index == null)
+        {
+            index = new DirectoryIndex(reader, nonEmptyGroupPath, errorStrategy, withLinkTargets);
+            cacheMap.put(nonEmptyGroupPath, index);
+        } else if (withLinkTargets)
+        {
+            index.amendLinkTargets();
+        }
+        return index;
+    }
+
+    @Override
+    public IErrorStrategy getErrorStrategy()
+    {
+        return errorStrategy;
+    }
+
+    @Override
+    public synchronized void close() throws IOExceptionUnchecked
+    {
+        IOExceptionUnchecked exeptionOrNull = null;
+        for (DirectoryIndex index : cacheMap.values())
+        {
+            try
+            {
+                index.close();
+            } catch (IOExceptionUnchecked ex)
+            {
+                if (exceptionOrNull == null)
+                {
+                    exceptionOrNull = ex;
+                }
+            }
+        }
+        if (exceptionOrNull != null)
+        {
+            throw exceptionOrNull;
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexUpdater.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexUpdater.java
new file mode 100644
index 0000000..a306d74
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexUpdater.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+
+/**
+ * A class to update the {@link DirectoryIndex} from files on the filesystem.
+ * 
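+ * <p>
+ * A minimal usage sketch (the paths and the <code>crc32</code> value are illustrative):
+ * 
+ * <pre>
+ * DirectoryIndexUpdater updater = new DirectoryIndexUpdater(indexProvider);
+ * // Adds an entry for file.dat to the index of "/some/dir" and, as the last argument is
+ * // false, also updates the indices of all parent groups up to the root.
+ * updater.updateIndicesOnThePath("/some/dir", new File("/fs/some/dir/file.dat"), crc32, false);
+ * </pre>
+ * 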
+ * @author Bernd Rinn
+ */
+final class DirectoryIndexUpdater
+{
+    private final IDirectoryIndexProvider indexProvider;
+
+    private final IErrorStrategy errorStrategy;
+
+    DirectoryIndexUpdater(IDirectoryIndexProvider indexProvider)
+    {
+        this.indexProvider = indexProvider;
+        this.errorStrategy = indexProvider.getErrorStrategy();
+    }
+
+    void updateIndicesOnThePath(String rootDir, File path, int crc32, boolean immediateGroupOnly)
+            throws IOExceptionUnchecked
+    {
+        String groupPath =
+                rootDir.endsWith("/") ? rootDir.substring(0, rootDir.length() - 1) : rootDir;
+        final IDirectoryIndex index = indexProvider.get(groupPath, false);
+        final LinkRecord linkOrNull = LinkRecord.tryCreate(path, errorStrategy);
+        if (linkOrNull == null)
+        {
+            throw new IOExceptionUnchecked("Cannot get link information for path '" + path + "'.");
+        }
+        linkOrNull.setCrc32(crc32);
+        index.updateIndex(linkOrNull);
+
+        if (immediateGroupOnly == false)
+        {
+            final String pathPrefixOnFSOrNull = tryGetPathPrefix(groupPath, path.getAbsolutePath());
+            String groupName = Utils.getName(groupPath);
+            groupPath = Utils.getParentPath(groupPath);
+            while (groupName.length() > 0)
+            {
+                updateIndex(pathPrefixOnFSOrNull, groupPath, groupName);
+                groupName = Utils.getName(groupPath);
+                groupPath = Utils.getParentPath(groupPath);
+            }
+        }
+    }
+
+    private void updateIndex(String pathPrefixOnFSOrNull, String groupPath, String groupName)
+    {
+        final IDirectoryIndex index = indexProvider.get(groupPath, false);
+        if (pathPrefixOnFSOrNull == null)
+        {
+            index.updateIndex(new LinkRecord(groupName));
+        } else
+        {
+            final File groupPathFile = new File(pathPrefixOnFSOrNull, groupName);
+            index.updateIndex(LinkRecord.tryCreate(groupPathFile, errorStrategy));
+        }
+    }
+
+    private String tryGetPathPrefix(String root, String filePath)
+    {
+        final String parentPath = Utils.getParentPath(filePath);
+        if (parentPath.endsWith(root) == false)
+        {
+            return null;
+        }
+        final String pathPrefix = parentPath.substring(0, parentPath.length() - root.length());
+        if (pathPrefix.length() == 0)
+        {
+            return "/";
+        } else
+        {
+            return pathPrefix;
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/GroupCache.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/GroupCache.java
new file mode 100644
index 0000000..bbad2a9
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/GroupCache.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang.ArrayUtils;
+
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.unix.Unix.Group;
+import ch.systemsx.cisd.base.unix.Unix.Password;
+
+/**
+ * Cache for group affiliations of the current user.
+ * 
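+ * <p>
+ * A minimal usage sketch (the gid value is illustrative):
+ * 
+ * <pre>
+ * GroupCache groupCache = new GroupCache();
+ * // True if the current user is a member of the group with gid 100; the answer is cached.
+ * boolean inGroup = groupCache.isUserInGroup(100);
+ * </pre>
+ * 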
+ * @author Bernd Rinn
+ */
+class GroupCache
+{
+    private final Password userOrNull;
+
+    /** Gid -> Is user member? */
+    private final Map<Integer, Boolean> gidMap = new HashMap<Integer, Boolean>();
+
+    GroupCache()
+    {
+        this.userOrNull = Unix.isOperational() ? Unix.tryGetUserByUid(Unix.getUid()) : null;
+    }
+
+    boolean isUserInGroup(int gid)
+    {
+        if (userOrNull == null)
+        {
+            return false;
+        }
+        final Boolean cached = gidMap.get(gid);
+        if (cached != null)
+        {
+            return cached;
+        }
+        final Group groupOrNull = Unix.tryGetGroupByGid(gid);
+        if (groupOrNull != null)
+        {
+            final int idx =
+                    ArrayUtils.indexOf(groupOrNull.getGroupMembers(), userOrNull.getUserName());
+            final Boolean found =
+                    idx != ArrayUtils.INDEX_NOT_FOUND ? Boolean.TRUE : Boolean.FALSE;
+            gidMap.put(gid, found);
+            return found;
+        } else
+        {
+            gidMap.put(gid, Boolean.FALSE);
+            return false;
+        }
+    }
+}
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveDeleter.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveDeleter.java
new file mode 100644
index 0000000..654d8e3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveDeleter.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.List;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+
+/**
+ * A class to delete paths from an <code>h5ar</code> archive.
+ * 
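+ * <p>
+ * A minimal usage sketch (assumes <code>writer</code>, <code>indexProvider</code> and
+ * <code>idCache</code> are set up as in {@link HDF5Archiver}):
+ * 
+ * <pre>
+ * HDF5ArchiveDeleter deleter = new HDF5ArchiveDeleter(writer, indexProvider, idCache);
+ * // Delete one path; pass an IArchiveEntryVisitor instead of null to be notified of deletions.
+ * deleter.delete(Collections.singletonList("/dir/file.dat"), null);
+ * </pre>
+ * 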
+ * @author Bernd Rinn
+ */
+class HDF5ArchiveDeleter
+{
+    private final IHDF5Writer hdf5Writer;
+
+    private final IDirectoryIndexProvider indexProvider;
+    
+    private final IdCache idCache;
+
+    public HDF5ArchiveDeleter(IHDF5Writer hdf5Writer, IDirectoryIndexProvider indexProvider, IdCache idCache)
+    {
+        this.hdf5Writer = hdf5Writer;
+        this.indexProvider = indexProvider;
+        this.idCache = idCache;
+    }
+
+    public HDF5ArchiveDeleter delete(List<String> hdf5ObjectPaths, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        for (String path : hdf5ObjectPaths)
+        {
+            final String normalizedPath = Utils.normalizePath(path);
+            final String group = Utils.getParentPath(normalizedPath);
+            final IDirectoryIndex index = indexProvider.get(group, false);
+            try
+            {
+                final String name = Utils.getName(normalizedPath);
+                LinkRecord link = index.tryGetLink(name);
+                if (link == null)
+                {
+                    link = LinkRecord.tryReadFromArchive(hdf5Writer, normalizedPath);
+                }
+                if (link != null)
+                {
+                    hdf5Writer.delete(normalizedPath);
+                    index.remove(name);
+                    if (entryVisitorOrNull != null)
+                    {
+                        final ArchiveEntry entry = new ArchiveEntry(group, normalizedPath, link, idCache);
+                        entryVisitorOrNull.visit(entry);
+                    }
+                }
+            } catch (HDF5Exception ex)
+            {
+                indexProvider.getErrorStrategy().dealWithError(
+                        new DeleteFromArchiveException(path, ex));
+            }
+        }
+        return this;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveTraverser.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveTraverser.java
new file mode 100644
index 0000000..0b55920
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveTraverser.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * A traverser for <code>h5ar</code> archives.
+ * 
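+ * <p>
+ * A minimal usage sketch (assumes <code>traverser</code> is constructed as in
+ * {@link HDF5Archiver} and <code>processor</code> is some {@link IArchiveEntryProcessor}
+ * implementation):
+ * 
+ * <pre>
+ * // Visit every entry below the root recursively, without following symbolic links.
+ * traverser.process("/", true, false, false, processor);
+ * </pre>
+ * 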
+ * @author Bernd Rinn
+ */
+class HDF5ArchiveTraverser
+{
+    interface IDirectoryChecker
+    {
+        boolean isDirectoryFollowSymlinks(ArchiveEntry entry);
+    }
+
+    private final IHDF5Reader hdf5Reader;
+
+    private final IDirectoryIndexProvider indexProvider;
+
+    private final IErrorStrategy errorStrategy;
+
+    private final IdCache idCache;
+
+    private final IDirectoryChecker directoryChecker;
+
+    public HDF5ArchiveTraverser(IDirectoryChecker directoryChecker, IHDF5Reader hdf5Reader,
+            IDirectoryIndexProvider indexProvider, IdCache idCache)
+    {
+        this.directoryChecker = directoryChecker;
+        this.hdf5Reader = hdf5Reader;
+        this.indexProvider = indexProvider;
+        this.errorStrategy = indexProvider.getErrorStrategy();
+        this.idCache = idCache;
+    }
+
+    public void process(String fileOrDir, boolean recursive, boolean readLinkTargets,
+            boolean followSymlinks, IArchiveEntryProcessor processor)
+    {
+        final String normalizedPath = Utils.normalizePath(fileOrDir);
+        final boolean isDirectory = hdf5Reader.object().isGroup(normalizedPath, followSymlinks);
+        final boolean effectiveReadLinkTargets = readLinkTargets || followSymlinks;
+
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        LinkRecord link = null;
+        if (parentPath.length() > 0)
+        {
+            link =
+                    indexProvider.get(parentPath, effectiveReadLinkTargets).tryGetLink(
+                            getNameFromPath(normalizedPath, parentPath));
+            if (link == null)
+            {
+                errorStrategy.dealWithError(processor.createException(normalizedPath,
+                        "Object not found in archive."));
+                return;
+            }
+            try
+            {
+                if (processor.process(parentPath, normalizedPath, link, hdf5Reader, idCache,
+                        errorStrategy) == false)
+                {
+                    return;
+                }
+            } catch (IOException ex)
+            {
+                final File f = new File(normalizedPath);
+                errorStrategy.dealWithError(processor.createException(f, ex));
+            } catch (HDF5Exception ex)
+            {
+                errorStrategy.dealWithError(processor.createException(normalizedPath, ex));
+            }
+        }
+        if (isDirectory)
+        {
+            processDirectory(normalizedPath, recursive, effectiveReadLinkTargets, followSymlinks,
+                    processor);
+            postProcessDirectory(parentPath, normalizedPath, link, processor);
+        }
+    }
+
+    private String getNameFromPath(final String normalizedPath, final String parentPath)
+    {
+        return normalizedPath.substring(parentPath.length() == 1 ? 1 : parentPath.length() + 1);
+    }
+
+    private void postProcessDirectory(final String parentPath, final String normalizedPath,
+            LinkRecord linkOrNull, IArchiveEntryProcessor processor)
+    {
+        if (linkOrNull != null)
+        {
+            try
+            {
+                processor.postProcessDirectory(parentPath, normalizedPath, linkOrNull, hdf5Reader,
+                        idCache, errorStrategy);
+            } catch (IOException ex)
+            {
+                final File f = new File(normalizedPath);
+                errorStrategy.dealWithError(processor.createException(f, ex));
+            } catch (HDF5Exception ex)
+            {
+                errorStrategy.dealWithError(processor.createException(normalizedPath, ex));
+            }
+        }
+    }
+
+    /**
+     * Provide the entries of <var>normalizedDir</var> to <var>processor</var>.
+     */
+    private void processDirectory(String normalizedDir, boolean recursive, boolean readLinkTargets,
+            boolean followSymlinks, IArchiveEntryProcessor processor)
+    {
+        if (hdf5Reader.object().exists(normalizedDir, followSymlinks) == false)
+        {
+            if (hdf5Reader.object().exists(normalizedDir, false) == false)
+            {
+                errorStrategy.dealWithError(processor.createException(normalizedDir,
+                        "Directory not found in archive."));
+            }
+            return;
+        }
+        for (LinkRecord link : indexProvider.get(normalizedDir, readLinkTargets))
+        {
+            final String path = Utils.concatLink(normalizedDir, link.getLinkName());
+            try
+            {
+                if (processor
+                        .process(normalizedDir, path, link, hdf5Reader, idCache, errorStrategy) == false)
+                {
+                    continue;
+                }
+                if (recursive && isDirectory(path, link, followSymlinks))
+                {
+                    processDirectory(path, recursive, readLinkTargets, followSymlinks, processor);
+                    postProcessDirectory(normalizedDir, path, link, processor);
+                }
+            } catch (IOException ex)
+            {
+                final File f = new File(path);
+                errorStrategy.dealWithError(processor.createException(f, ex));
+            } catch (HDF5Exception ex)
+            {
+                errorStrategy.dealWithError(processor.createException(path, ex));
+            }
+        }
+    }
+
+    private boolean isDirectory(String path, LinkRecord link, boolean followSymlinks)
+    {
+        if (link.isDirectory() == false && followSymlinks)
+        {
+            return directoryChecker.isDirectoryFollowSymlinks(toArchiveEntry(path, link));
+        } else
+        {
+            return link.isDirectory();
+        }
+    }
+
+    private ArchiveEntry toArchiveEntry(String path, LinkRecord linkRecord)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        return Utils.tryToArchiveEntry(parentPath, normalizedPath, linkRecord, idCache);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveUpdater.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveUpdater.java
new file mode 100644
index 0000000..29ca95a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiveUpdater.java
@@ -0,0 +1,723 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.Flushable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Iterator;
+import java.util.List;
+import java.util.zip.CRC32;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.io.IOutputStream;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+import ch.systemsx.cisd.hdf5.HDF5OpaqueType;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory;
+
+/**
+ * A class to create or update <code>h5ar</code> archives.
+ * 
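+ * <p>
+ * A minimal usage sketch (assumes <code>writer</code>, <code>indexProvider</code>,
+ * <code>idCache</code> and <code>buffer</code> are set up as in {@link HDF5Archiver} and
+ * <code>strategy</code> is an {@link ArchivingStrategy} instance):
+ * 
+ * <pre>
+ * HDF5ArchiveUpdater updater = new HDF5ArchiveUpdater(writer, indexProvider, idCache, buffer);
+ * // Archive the directory tree below "data" into the archive root; CHUNK_SIZE_AUTO lets the
+ * // updater derive the chunk size from the buffer length.
+ * updater.archive("/", new File("data"), strategy, HDF5Archiver.CHUNK_SIZE_AUTO, null);
+ * </pre>
+ * 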
+ * @author Bernd Rinn
+ */
+class HDF5ArchiveUpdater
+{
+    private static final String OPAQUE_TAG_FILE = "FILE";
+
+    private static final int SIZEHINT_FACTOR = 5;
+
+    private static final int MIN_GROUP_MEMBER_COUNT_TO_COMPUTE_SIZEHINT = 100;
+
+    private static final int SMALL_DATASET_LIMIT = 4096;
+
+    private final IHDF5Writer hdf5Writer;
+
+    private final IDirectoryIndexProvider indexProvider;
+
+    private final IErrorStrategy errorStrategy;
+
+    private final DirectoryIndexUpdater indexUpdater;
+
+    private final IdCache idCache;
+
+    private final byte[] buffer;
+
+    static class DataSetInfo
+    {
+        final long size;
+
+        final int crc32;
+
+        DataSetInfo(long size, int crc32)
+        {
+            this.size = size;
+            this.crc32 = crc32;
+        }
+    }
+
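+    /**
+     * An {@link IOutputStream} that writes the content of one archive member as an opaque data
+     * set, keeping track of the size and CRC32 checksum of the bytes written and updating the
+     * directory indices on {@link #flush()}.
+     */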
+    private final class H5ARIOutputStream implements IOutputStream, Flushable
+    {
+        private final IOutputStream delegate;
+
+        private final String directory;
+
+        private final String path;
+
+        private final LinkRecord link;
+
+        private final CRC32 crc32 = new CRC32();
+
+        private long size = 0;
+
+        H5ARIOutputStream(String normalizedDirectory, LinkRecord link, int chunkSize,
+                boolean compress)
+        {
+            this.directory = normalizedDirectory;
+            this.path = Utils.concatLink(this.directory, link.getLinkName());
+            this.link = link;
+            final HDF5GenericStorageFeatures creationStorageFeature =
+                    compress ? HDF5GenericStorageFeatures.GENERIC_DEFLATE
+                            : HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION;
+            this.delegate =
+                    HDF5IOAdapterFactory.asIOutputStream(hdf5Writer, path, creationStorageFeature,
+                            getEffectiveChunkSize(chunkSize), OPAQUE_TAG_FILE);
+            indexProvider.get(normalizedDirectory, false).addFlushable(this);
+        }
+
+        @Override
+        public void write(int b) throws IOExceptionUnchecked
+        {
+            crc32.update(b);
+            ++size;
+            delegate.write(b);
+        }
+
+        @Override
+        public void write(byte[] b) throws IOExceptionUnchecked
+        {
+            crc32.update(b);
+            size += b.length;
+            delegate.write(b);
+        }
+
+        @Override
+        public void write(byte[] b, int off, int len) throws IOExceptionUnchecked
+        {
+            crc32.update(b, off, len);
+            size += len;
+            delegate.write(b, off, len);
+        }
+
+        @Override
+        public void flush() throws IOExceptionUnchecked
+        {
+            link.setCrc32((int) crc32.getValue());
+            link.setSize(size);
+            final boolean updateImmediateGroupOnly = hdf5Writer.isGroup(directory);
+            updateIndicesOnThePath(path, link, updateImmediateGroupOnly);
+            delegate.flush();
+        }
+
+        @Override
+        public void synchronize() throws IOExceptionUnchecked
+        {
+            delegate.synchronize();
+        }
+
+        @Override
+        public void close() throws IOExceptionUnchecked
+        {
+            flush();
+            delegate.close();
+            // Unregister from the same index that addFlushable() registered with in the
+            // constructor.
+            indexProvider.get(directory, false).removeFlushable(this);
+        }
+
+    }
+
+    public HDF5ArchiveUpdater(IHDF5Writer hdf5Writer, IDirectoryIndexProvider indexProvider,
+            IdCache idCache, byte[] buffer)
+    {
+        this.hdf5Writer = hdf5Writer;
+        this.indexProvider = indexProvider;
+        this.idCache = idCache;
+        this.errorStrategy = indexProvider.getErrorStrategy();
+        this.indexUpdater = new DirectoryIndexUpdater(indexProvider);
+        this.buffer = buffer;
+    }
+
+    public HDF5ArchiveUpdater archive(File path, ArchivingStrategy strategy, int chunkSize,
+            boolean keepNameFromPath, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        final File absolutePath = Utils.normalizePath(path);
+        return archive(keepNameFromPath ? absolutePath.getParentFile() : absolutePath,
+                absolutePath, strategy, chunkSize, entryVisitorOrNull);
+    }
+
+    public IOutputStream archiveFile(String directory, LinkRecord link, boolean compress,
+            int chunkSize)
+    {
+        if (link.getLinkType() != FileLinkType.REGULAR_FILE)
+        {
+            errorStrategy.dealWithError(new ArchivingException("A regular file is expected here."));
+        }
+        return new H5ARIOutputStream(Utils.normalizePath(directory), link, chunkSize, compress);
+    }
+
+    public HDF5ArchiveUpdater archive(String directory, LinkRecord link, InputStream inputOrNull,
+            boolean compress, int chunkSize)
+    {
+        boolean ok = true;
+        final String normalizedDir = Utils.normalizePath(directory);
+        final String hdf5ObjectPath = Utils.concatLink(normalizedDir, link.getLinkName());
+        final ArchiveEntry entry = new ArchiveEntry(normalizedDir, hdf5ObjectPath, link, idCache);
+        final boolean groupExists = hdf5Writer.isGroup(normalizedDir);
+        if (link.getLinkType() == FileLinkType.DIRECTORY)
+        {
+            if (inputOrNull == null)
+            {
+                ok = archiveEmptyDirectory(normalizedDir, link);
+            } else
+            {
+                errorStrategy.dealWithError(new ArchivingException(
+                        "Cannot take InputStream when archiving a directory."));
+            }
+        } else if (link.getLinkType() == FileLinkType.SYMLINK)
+        {
+            if (inputOrNull == null)
+            {
+                ok = archiveSymLink(entry);
+            } else
+            {
+                errorStrategy.dealWithError(new ArchivingException(
+                        "Cannot take InputStream when archiving a symlink."));
+            }
+        } else if (link.getLinkType() == FileLinkType.REGULAR_FILE)
+        {
+            if (inputOrNull != null)
+            {
+                final HDF5GenericStorageFeatures compression =
+                        compress ? HDF5GenericStorageFeatures.GENERIC_DEFLATE
+                                : HDF5GenericStorageFeatures.GENERIC_NO_COMPRESSION;
+                try
+                {
+                    final DataSetInfo info =
+                            copyToHDF5(inputOrNull, hdf5ObjectPath, compression, chunkSize);
+                    link.setCrc32(info.crc32);
+                    link.setSize(info.size);
+                } catch (IOException ex)
+                {
+                    ok = false;
+                    errorStrategy.dealWithError(new ArchivingException(hdf5ObjectPath, ex));
+                } catch (HDF5Exception ex)
+                {
+                    ok = false;
+                    errorStrategy.dealWithError(new ArchivingException(hdf5ObjectPath, ex));
+                }
+            } else
+            {
+                errorStrategy.dealWithError(new ArchivingException(
+                        "Need to have InputStream when archiving a regular file."));
+            }
+        } else
+        {
+            errorStrategy.dealWithError(new ArchivingException(
+                    "Don't know how to archive file link type " + link.getLinkType()));
+            ok = false;
+        }
+        if (ok)
+        {
+            updateIndicesOnThePath(hdf5ObjectPath, link, groupExists);
+        }
+        return this;
+    }
+
+    public HDF5ArchiveUpdater archive(String rootDirInArchive, File path,
+            ArchivingStrategy strategy, int chunkSize, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        final File absolutePath = Utils.normalizePath(path);
+        final String normalizedRootDirInArchive = Utils.normalizePath(rootDirInArchive);
+        final String hdf5ObjectPath =
+                Utils.concatLink(normalizedRootDirInArchive, absolutePath.getName());
+        final String hdf5GroupPath = Utils.getParentPath(hdf5ObjectPath);
+        final boolean groupExists = hdf5Writer.isGroup(hdf5GroupPath);
+        final boolean ok;
+        int crc32 = 0;
+        final LinkRecord linkOrNull = LinkRecord.tryCreate(absolutePath, errorStrategy);
+        if (linkOrNull == null)
+        {
+            return this;
+        }
+        final ArchiveEntry entry =
+                new ArchiveEntry(normalizedRootDirInArchive, hdf5ObjectPath, linkOrNull, idCache);
+        if (linkOrNull.isSymLink())
+        {
+            ok = archiveSymLink(entry, absolutePath, entryVisitorOrNull);
+        } else if (absolutePath.isDirectory())
+        {
+            ok = archiveDirectory(absolutePath, entry, strategy, chunkSize, entryVisitorOrNull);
+        } else if (absolutePath.isFile())
+        {
+            final DataSetInfo dataSetInfoOrNull =
+                    tryArchiveFile(absolutePath, entry,
+                            strategy.getStorageFeatureForPath(hdf5ObjectPath), chunkSize,
+                            entryVisitorOrNull);
+            ok = (dataSetInfoOrNull != null);
+            if (dataSetInfoOrNull != null)
+            {
+                crc32 = dataSetInfoOrNull.crc32;
+            }
+        } else
+        {
+            ok = false;
+            errorStrategy.dealWithError(new ArchivingException(absolutePath, new IOException(
+                    "Path corresponds to neither a file nor a directory.")));
+        }
+        if (ok)
+        {
+            indexUpdater.updateIndicesOnThePath(normalizedRootDirInArchive, absolutePath, crc32,
+                    groupExists);
+        }
+        return this;
+    }
+
+    public HDF5ArchiveUpdater archiveBelow(String rootDirInArchive, File directory,
+            ArchivingStrategy strategy, int chunkSize, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        final File absoluteDirectory = Utils.normalizePath(directory);
+        if (absoluteDirectory.isDirectory())
+        {
+            final LinkRecord linkOrNull = LinkRecord.tryCreate(absoluteDirectory, errorStrategy);
+            if (linkOrNull == null)
+            {
+                return this;
+            }
+            final String normalizedRootDirInArchive = Utils.normalizePath(rootDirInArchive);
+            final ArchiveEntry dirEntry =
+                    new ArchiveEntry(null, normalizedRootDirInArchive, linkOrNull, idCache);
+            archiveDirectory(absoluteDirectory, dirEntry, strategy, chunkSize, entryVisitorOrNull);
+        } else
+        {
+            errorStrategy.dealWithError(new ArchivingException(absoluteDirectory, new IOException(
+                    "Path does not correspond to a directory.")));
+        }
+        return this;
+    }
+
+    public HDF5ArchiveUpdater archive(File parentDirToStrip, File path, ArchivingStrategy strategy,
+            int chunkSize, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        final File absoluteParentDirToStrip = Utils.normalizePath(parentDirToStrip);
+        final File absolutePath = Utils.normalizePath(path);
+        final String hdf5ObjectPath = getRelativePath(absoluteParentDirToStrip, absolutePath);
+        final String hdf5GroupPath = Utils.getParentPath(hdf5ObjectPath);
+        final boolean groupExists =
+                (hdf5GroupPath.length() == 0) ? true : hdf5Writer.isGroup(hdf5GroupPath);
+        final boolean ok;
+        int crc32 = 0;
+        final LinkRecord linkOrNull = LinkRecord.tryCreate(absolutePath, errorStrategy);
+        final ArchiveEntry entry =
+                new ArchiveEntry(hdf5GroupPath, hdf5ObjectPath, linkOrNull, idCache);
+        if (linkOrNull != null && linkOrNull.isSymLink())
+        {
+            ok = archiveSymLink(entry, absolutePath, entryVisitorOrNull);
+        } else if (absolutePath.isDirectory())
+        {
+            ok = archiveDirectory(absolutePath, entry, strategy, chunkSize, entryVisitorOrNull);
+        } else if (absolutePath.isFile())
+        {
+            final DataSetInfo dataSetInfoOrNull =
+                    tryArchiveFile(absolutePath, entry,
+                            strategy.getStorageFeatureForPath(hdf5ObjectPath), chunkSize,
+                            entryVisitorOrNull);
+            ok = (dataSetInfoOrNull != null);
+            if (dataSetInfoOrNull != null)
+            {
+                crc32 = dataSetInfoOrNull.crc32;
+            }
+        } else
+        {
+            ok = false;
+            errorStrategy.dealWithError(new ArchivingException(absolutePath, new IOException(
+                    "Path corresponds to neither a file nor a directory.")));
+        }
+        if (ok)
+        {
+            updateIndicesOnThePath(absoluteParentDirToStrip, absolutePath, crc32, groupExists);
+        }
+        return this;
+    }
+
+    private void updateIndicesOnThePath(File parentDirToStrip, File path, int crc32,
+            boolean immediateGroupOnly)
+    {
+        final String rootAbsolute = parentDirToStrip.getAbsolutePath();
+        File pathProcessing = path;
+        int crc32Processing = crc32;
+        while (true)
+        {
+            File dirProcessingOrNull = pathProcessing.getParentFile();
+            String dirAbsolute =
+                    (dirProcessingOrNull != null) ? dirProcessingOrNull.getAbsolutePath() : "";
+            if (dirProcessingOrNull == null || dirAbsolute.startsWith(rootAbsolute) == false)
+            {
+                break;
+            }
+            final String hdf5GroupPath = getRelativePath(rootAbsolute, dirAbsolute);
+            final IDirectoryIndex index = indexProvider.get(hdf5GroupPath, false);
+            final LinkRecord linkOrNull = LinkRecord.tryCreate(pathProcessing, errorStrategy);
+            if (linkOrNull != null)
+            {
+                linkOrNull.setCrc32(crc32Processing);
+                crc32Processing = 0; // Directories don't have a checksum
+                index.updateIndex(linkOrNull);
+            }
+            pathProcessing = dirProcessingOrNull;
+            if (immediateGroupOnly)
+            {
+                break;
+            }
+        }
+    }
+
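+    /**
+     * Updates the directory indices on the path from <var>path</var> up to the root with the
+     * metadata of <var>link</var>. For example, for <code>/a/b/f</code> the index of
+     * <code>/a/b</code> gets an entry for <code>f</code>, then <code>/a</code> one for
+     * <code>b</code> and <code>/</code> one for <code>a</code> (as directories, which carry no
+     * checksum, size or link target). If <var>immediateGroupOnly</var> is set, only the index
+     * of the immediate parent group is updated.
+     */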
+    private void updateIndicesOnThePath(String path, LinkRecord link, boolean immediateGroupOnly)
+    {
+        String pathProcessing = Utils.normalizePath(path);
+        if ("/".equals(pathProcessing))
+        {
+            return;
+        }
+        int crc32 = link.getCrc32();
+        long size = link.getSize();
+        long lastModified = link.getLastModified();
+        short permissions = link.getPermissions();
+        int uid = link.getUid();
+        int gid = link.getGid();
+        FileLinkType fileLinkType = link.getLinkType();
+        String linkTargetOrNull = link.tryGetLinkTarget();
+        while (true)
+        {
+            final String hdf5GroupPath = Utils.getParentPath(pathProcessing);
+            final IDirectoryIndex index = indexProvider.get(hdf5GroupPath, false);
+            final String hdf5FileName = Utils.getName(pathProcessing);
+            final LinkRecord linkProcessing =
+                    new LinkRecord(hdf5FileName, linkTargetOrNull, fileLinkType, size,
+                            lastModified, uid, gid, permissions, crc32);
+            index.updateIndex(linkProcessing);
+            fileLinkType = FileLinkType.DIRECTORY;
+            crc32 = 0; // Directories don't have a checksum
+            size = Utils.UNKNOWN; // Directories don't have a size
+            linkTargetOrNull = null; // Directories don't have a link target
+            pathProcessing = hdf5GroupPath;
+            if (immediateGroupOnly || "/".equals(pathProcessing))
+            {
+                break;
+            }
+        }
+    }
+
+    private boolean archiveEmptyDirectory(String parentDirectory, LinkRecord link)
+    {
+        final String hdf5GroupPath = Utils.concatLink(parentDirectory, link.getLinkName());
+        try
+        {
+            hdf5Writer.object().createGroup(hdf5GroupPath);
+            return true;
+        } catch (HDF5Exception ex)
+        {
+            errorStrategy.dealWithError(new ArchivingException(hdf5GroupPath, ex));
+            return false;
+        }
+    }
+
+    private boolean archiveDirectory(File dir, ArchiveEntry dirEntry, ArchivingStrategy strategy,
+            int chunkSize, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        final File[] fileEntries = dir.listFiles();
+        if (fileEntries == null)
+        {
+            errorStrategy.dealWithError(new ArchivingException(dir, new IOException(
+                    "Cannot read directory")));
+            return false;
+        }
+        final String hdf5GroupPath = dirEntry.getPath();
+        if ("/".equals(hdf5GroupPath) == false)
+            try
+            {
+                if (hdf5Writer.file().getFileFormat() != FileFormat.STRICTLY_1_8
+                        && fileEntries.length > MIN_GROUP_MEMBER_COUNT_TO_COMPUTE_SIZEHINT)
+                {
+                    // Compute size hint and pre-create group in order to improve performance.
+                    int totalLength = computeSizeHint(fileEntries);
+                    hdf5Writer.object().createGroup(hdf5GroupPath, totalLength * SIZEHINT_FACTOR);
+                } else
+                {
+                    hdf5Writer.object().createGroup(hdf5GroupPath);
+                }
+            } catch (HDF5Exception ex)
+            {
+                errorStrategy.dealWithError(new ArchivingException(hdf5GroupPath, ex));
+            }
+        }
+        final List<LinkRecord> linkEntries =
+                DirectoryIndex.convertFilesToLinks(fileEntries, errorStrategy);
+
+        if (entryVisitorOrNull != null)
+        {
+            entryVisitorOrNull.visit(dirEntry);
+        }
+        final Iterator<LinkRecord> linkIt = linkEntries.iterator();
+        for (int i = 0; i < fileEntries.length; ++i)
+        {
+            final File file = fileEntries[i];
+            final LinkRecord link = linkIt.next();
+            if (link == null)
+            {
+                linkIt.remove();
+                continue;
+            }
+            final String absoluteEntry = file.getAbsolutePath();
+            final ArchiveEntry entry =
+                    new ArchiveEntry(hdf5GroupPath, Utils.concatLink(hdf5GroupPath,
+                            link.getLinkName()), link, idCache);
+            if (entry.isDirectory())
+            {
+                if (strategy.doExclude(absoluteEntry, true))
+                {
+                    linkIt.remove();
+                    continue;
+                }
+                final boolean ok =
+                        archiveDirectory(file, entry, strategy, chunkSize, entryVisitorOrNull);
+                if (ok == false)
+                {
+                    linkIt.remove();
+                }
+            } else
+            {
+                if (strategy.doExclude(absoluteEntry, false))
+                {
+                    linkIt.remove();
+                    continue;
+                }
+                if (entry.isSymLink())
+                {
+                    final boolean ok = archiveSymLink(entry, file, entryVisitorOrNull);
+                    if (ok == false)
+                    {
+                        linkIt.remove();
+                    }
+                } else if (entry.isRegularFile())
+                {
+                    final DataSetInfo dataSetInfoOrNull =
+                            tryArchiveFile(file, entry,
+                                    strategy.getStorageFeatureForPath(entry.getPath()), chunkSize,
+                                    entryVisitorOrNull);
+                    if (dataSetInfoOrNull == null)
+                    {
+                        linkIt.remove();
+                    } else
+                    {
+                        link.setSize(dataSetInfoOrNull.size);
+                        link.setCrc32(dataSetInfoOrNull.crc32);
+                    }
+                } else
+                {
+                    errorStrategy.dealWithError(new ArchivingException(file, new IOException(
+                            "Path corresponds to neither a file nor a directory.")));
+                }
+            }
+        }
+
+        final boolean verbose = (entryVisitorOrNull != null);
+        final IDirectoryIndex index = indexProvider.get(hdf5GroupPath, verbose);
+        index.updateIndex(linkEntries);
+        return true;
+    }
+
+    private boolean archiveSymLink(ArchiveEntry entry)
+    {
+        if (entry.hasLinkTarget() == false)
+        {
+            errorStrategy.dealWithError(new ArchivingException(entry.getName(), new IOException(
+                    "Link target not given for symbolic link.")));
+            return false;
+        }
+        return archiveSymLink(entry, null);
+    }
+
+    private boolean archiveSymLink(ArchiveEntry entry, File file,
+            IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        if (entry.hasLinkTarget() == false)
+        {
+            errorStrategy.dealWithError(new ArchivingException(file, new IOException(
+                    "Cannot read link target of symbolic link.")));
+            return false;
+        }
+        return archiveSymLink(entry, entryVisitorOrNull);
+    }
+
+    private boolean archiveSymLink(ArchiveEntry entry, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        try
+        {
+            hdf5Writer.object().createSoftLink(entry.getLinkTarget(), entry.getPath());
+            if (entryVisitorOrNull != null)
+            {
+                entryVisitorOrNull.visit(entry);
+            }
+        } catch (HDF5Exception ex)
+        {
+            errorStrategy.dealWithError(new ArchivingException(entry.getPath(), ex));
+            return false;
+        }
+        return true;
+    }
+
+    private static int computeSizeHint(final File[] entries)
+    {
+        int totalLength = 0;
+        for (File entry : entries)
+        {
+            totalLength += entry.getName().length();
+        }
+        return totalLength;
+    }
+
+    private DataSetInfo tryArchiveFile(File file, ArchiveEntry entry,
+            HDF5GenericStorageFeatures features, int chunkSize,
+            IArchiveEntryVisitor entryVisitorOrNull) throws ArchivingException
+    {
+        DataSetInfo info = null;
+        try
+        {
+            info = copyToHDF5(file, entry.getPath(), features, chunkSize);
+            entry.setDataSetInfo(info);
+            if (entryVisitorOrNull != null)
+            {
+                entryVisitorOrNull.visit(entry);
+            }
+        } catch (IOException ex)
+        {
+            errorStrategy.dealWithError(new ArchivingException(file, ex));
+        } catch (HDF5Exception ex)
+        {
+            errorStrategy.dealWithError(new ArchivingException(entry.getPath(), ex));
+        }
+        return info;
+    }
+
+    static String getRelativePath(File root, File filePath)
+    {
+        return getRelativePath(root.getAbsolutePath(), filePath.getAbsolutePath());
+    }
+
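+    /**
+     * Returns <var>filePath</var> relative to <var>parentDirToBeStripped</var>, using Unix
+     * separators. For example, stripping <code>/tmp/root</code> from <code>/tmp/root/a/b</code>
+     * yields <code>/a/b</code>. Throws an {@link IOExceptionUnchecked} if neither path is a
+     * prefix of the other.
+     */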
+    static String getRelativePath(String parentDirToBeStripped, String filePath)
+    {
+        if (filePath.startsWith(parentDirToBeStripped) == false
+                && parentDirToBeStripped.startsWith(filePath) == false)
+        {
+            throw new IOExceptionUnchecked("Path '" + filePath + "' does not start with '"
+                    + parentDirToBeStripped + "'.");
+        }
+        final String path =
+                (parentDirToBeStripped.length() >= filePath.length()) ? "/" : filePath
+                        .substring(parentDirToBeStripped.length());
+        return FilenameUtils.separatorsToUnix(path);
+    }
+
+    private DataSetInfo copyToHDF5(final File source, final String objectPath,
+            final HDF5GenericStorageFeatures compression, int chunkSize) throws IOException
+    {
+        final InputStream input = FileUtils.openInputStream(source);
+        try
+        {
+            return copyToHDF5(input, objectPath, compression, chunkSize);
+        } finally
+        {
+            IOUtils.closeQuietly(input);
+        }
+    }
+
+    private int getEffectiveChunkSize(int chunkSize)
+    {
+        return (chunkSize <= 0 || chunkSize > buffer.length) ? buffer.length : chunkSize;
+    }
+
+    private DataSetInfo copyToHDF5(final InputStream input, final String objectPath,
+            final HDF5GenericStorageFeatures compression, int chunkSize) throws IOException
+    {
+        final int effectiveBufferLength = getEffectiveChunkSize(chunkSize);
+        final CRC32 crc32 = new CRC32();
+        HDF5GenericStorageFeatures features = compression;
+        int n = fillBuffer(input, effectiveBufferLength);
+        // Deal with small data sources separately to keep the file size smaller
+        if (n < effectiveBufferLength)
+        {
+            // For data sets of up to roughly 4096 bytes, the overhead of a chunked data set
+            // outweighs the savings from compression, so store them contiguously.
+            if (n <= SMALL_DATASET_LIMIT || features.isDeflating() == false)
+            {
+                features = HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS;
+            }
+            final HDF5OpaqueType type =
+                    hdf5Writer.opaque().createArray(objectPath, OPAQUE_TAG_FILE, n, features);
+            hdf5Writer.opaque().writeArrayBlockWithOffset(objectPath, type, buffer, n, 0);
+            crc32.update(buffer, 0, n);
+            return new DataSetInfo(n, (int) crc32.getValue());
+        }
+
+        final HDF5OpaqueType type =
+                hdf5Writer.opaque().createArray(objectPath, OPAQUE_TAG_FILE, 0,
+                        effectiveBufferLength, compression);
+        long count = 0;
+        while (n > 0)
+        {
+            hdf5Writer.opaque().writeArrayBlockWithOffset(objectPath, type, buffer, n, count);
+            count += n;
+            crc32.update(buffer, 0, n);
+            n = fillBuffer(input, effectiveBufferLength);
+        }
+        return new DataSetInfo(count, (int) crc32.getValue());
+    }
+
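+    /**
+     * Fills the shared buffer from <var>input</var>, reading until <var>bufferLength</var>
+     * bytes have been read or the end of the stream has been reached, and returns the number of
+     * bytes actually read.
+     */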
+    private int fillBuffer(InputStream input, int bufferLength) throws IOException
+    {
+        int ofs = 0;
+        int len = bufferLength;
+        int count = 0;
+        int n = 0;
+        while (len > 0 && -1 != (n = input.read(buffer, ofs, len)))
+        {
+            ofs += n;
+            len -= n;
+            count += n;
+        }
+        return count;
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5Archiver.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5Archiver.java
new file mode 100644
index 0000000..4d44297
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5Archiver.java
@@ -0,0 +1,955 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.Closeable;
+import java.io.File;
+import java.io.Flushable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.io.AdapterIInputStreamToInputStream;
+import ch.systemsx.cisd.base.io.AdapterIOutputStreamToOutputStream;
+import ch.systemsx.cisd.base.io.IInputStream;
+import ch.systemsx.cisd.base.io.IOutputStream;
+import ch.systemsx.cisd.hdf5.HDF5DataBlock;
+import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.SyncMode;
+import ch.systemsx.cisd.hdf5.h5ar.NewArchiveEntry.NewDirectoryArchiveEntry;
+import ch.systemsx.cisd.hdf5.h5ar.NewArchiveEntry.NewFileArchiveEntry;
+import ch.systemsx.cisd.hdf5.h5ar.NewArchiveEntry.NewSymLinkArchiveEntry;
+import ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory;
+
+/**
+ * An archiver that uses HDF5 as the archive format for directory trees and provides fast random
+ * access to individual files in the archive.
+ * 
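+ * <p>
+ * A minimal usage sketch (the archive file name is illustrative):
+ * 
+ * <pre>
+ * HDF5Archiver archiver = new HDF5Archiver(new File("data.h5ar"), true); // read-only
+ * for (ArchiveEntry entry : archiver.list())
+ * {
+ *     System.out.println(entry.getPath());
+ * }
+ * archiver.close();
+ * </pre>
+ * 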
+ * @author Bernd Rinn
+ */
+final class HDF5Archiver implements Closeable, Flushable, IHDF5Archiver, IHDF5ArchiveInfoProvider
+{
+    private static final String HOUSEKEEPING_SUFFIX = "\1\0";
+
+    public static final int CHUNK_SIZE_AUTO = -1;
+
+    private final static int MB = 1024 * 1024;
+
+    final static int BUFFER_SIZE = 10 * MB;
+
+    private final IHDF5Reader hdf5Reader;
+
+    private final IHDF5Writer hdf5WriterOrNull;
+
+    private final boolean closeReaderOnCloseFile;
+
+    private final IErrorStrategy errorStrategy;
+
+    private final IDirectoryIndexProvider indexProvider;
+
+    private final byte[] buffer;
+
+    private final HDF5ArchiveUpdater updaterOrNull;
+
+    private final HDF5ArchiveDeleter deleterOrNull;
+
+    private final HDF5ArchiveTraverser processor;
+
+    private final IdCache idCache;
+
+    static IHDF5Reader createHDF5Reader(File archiveFile)
+    {
+        return HDF5FactoryProvider.get().openForReading(archiveFile);
+    }
+
+    static IHDF5Writer createHDF5Writer(File archiveFile, FileFormat fileFormat, boolean noSync)
+    {
+        final IHDF5WriterConfigurator config = HDF5FactoryProvider.get().configure(archiveFile);
+        config.fileFormat(fileFormat);
+        config.useUTF8CharacterEncoding();
+        config.houseKeepingNameSuffix(HOUSEKEEPING_SUFFIX);
+        if (noSync == false)
+        {
+            config.syncMode(SyncMode.SYNC);
+        }
+        return config.writer();
+    }
+
+    HDF5Archiver(File archiveFile, boolean readOnly)
+    {
+        this(archiveFile, readOnly, false, FileFormat.STRICTLY_1_6, null);
+    }
+
+    HDF5Archiver(File archiveFile, boolean readOnly, boolean noSync, FileFormat fileFormat,
+            IErrorStrategy errorStrategyOrNull)
+    {
+        this.buffer = new byte[BUFFER_SIZE];
+        this.closeReaderOnCloseFile = true;
+        this.hdf5WriterOrNull = readOnly ? null : createHDF5Writer(archiveFile, fileFormat, noSync);
+        this.hdf5Reader =
+                (hdf5WriterOrNull != null) ? hdf5WriterOrNull : createHDF5Reader(archiveFile);
+        if (errorStrategyOrNull == null)
+        {
+            this.errorStrategy = IErrorStrategy.DEFAULT_ERROR_STRATEGY;
+        } else
+        {
+            this.errorStrategy = errorStrategyOrNull;
+        }
+        this.indexProvider = new DirectoryIndexProvider(hdf5Reader, errorStrategy);
+        this.idCache = new IdCache();
+        this.processor = new HDF5ArchiveTraverser(new HDF5ArchiveTraverser.IDirectoryChecker()
+            {
+                @Override
+                public boolean isDirectoryFollowSymlinks(ArchiveEntry entry)
+                {
+                    final ArchiveEntry resolvedEntry = tryResolveLink(entry);
+                    return (resolvedEntry == null) ? false : resolvedEntry.isDirectory();
+                }
+            }, hdf5Reader, indexProvider, idCache);
+        if (hdf5WriterOrNull == null)
+        {
+            this.updaterOrNull = null;
+            this.deleterOrNull = null;
+        } else
+        {
+            this.updaterOrNull =
+                    new HDF5ArchiveUpdater(hdf5WriterOrNull, indexProvider, idCache, buffer);
+            this.deleterOrNull = new HDF5ArchiveDeleter(hdf5WriterOrNull, indexProvider, idCache);
+        }
+    }
+
+    HDF5Archiver(IHDF5Reader reader, boolean enforceReadOnly, IErrorStrategy errorStrategyOrNull)
+    {
+        this.buffer = new byte[BUFFER_SIZE];
+        this.closeReaderOnCloseFile = false;
+        this.hdf5WriterOrNull =
+                (enforceReadOnly == false && reader instanceof IHDF5Writer) ? (IHDF5Writer) reader
+                        : null;
+        if (errorStrategyOrNull == null)
+        {
+            this.errorStrategy = IErrorStrategy.DEFAULT_ERROR_STRATEGY;
+        } else
+        {
+            this.errorStrategy = errorStrategyOrNull;
+        }
+        this.hdf5Reader = reader;
+        this.indexProvider = new DirectoryIndexProvider(hdf5Reader, errorStrategy);
+        this.idCache = new IdCache();
+        this.processor = new HDF5ArchiveTraverser(new HDF5ArchiveTraverser.IDirectoryChecker()
+            {
+                @Override
+                public boolean isDirectoryFollowSymlinks(ArchiveEntry entry)
+                {
+                    final ArchiveEntry resolvedEntry = tryResolveLink(entry);
+                    return (resolvedEntry == null) ? false : resolvedEntry.isDirectory();
+                }
+            }, hdf5Reader, indexProvider, idCache);
+        if (hdf5WriterOrNull == null)
+        {
+            this.updaterOrNull = null;
+            this.deleterOrNull = null;
+        } else
+        {
+            this.updaterOrNull =
+                    new HDF5ArchiveUpdater(hdf5WriterOrNull, indexProvider, idCache, buffer);
+            this.deleterOrNull = new HDF5ArchiveDeleter(hdf5WriterOrNull, indexProvider, idCache);
+        }
+    }
+
+    //
+    // Closeable
+    //
+
+    @Override
+    public void close()
+    {
+        if (isClosed() == false)
+        {
+            flush();
+        }
+        if (closeReaderOnCloseFile)
+        {
+            hdf5Reader.close();
+        } else
+        {
+            indexProvider.close();
+        }
+    }
+
+    @Override
+    public boolean isClosed()
+    {
+        return hdf5Reader.file().isClosed();
+    }
+
+    //
+    // Flushable
+    //
+
+    @Override
+    public void flush()
+    {
+        if (hdf5WriterOrNull != null)
+        {
+            hdf5WriterOrNull.file().flush();
+        }
+    }
+
+    //
+    // IHDF5ArchiveInfo
+    //
+
+    @Override
+    public boolean exists(String path)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        return indexProvider.get(parentPath, false).exists(name);
+    }
+
+    @Override
+    public boolean isDirectory(String path)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        return indexProvider.get(parentPath, false).isDirectory(name);
+    }
+
+    @Override
+    public boolean isRegularFile(String path)
+    {
+        return isRegularFile(tryGetLink(path, false));
+    }
+
+    @Override
+    public boolean isSymLink(String path)
+    {
+        return isSymLink(tryGetLink(path, false));
+    }
+
+    @Override
+    public ArchiveEntry tryGetEntry(String path, boolean readLinkTarget)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        if ("/".equals(normalizedPath))
+        {
+            return new ArchiveEntry("", "/", LinkRecord.getLinkRecordForArchiveRoot(hdf5Reader
+                    .file().getFile()), idCache);
+        }
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        return Utils.tryToArchiveEntry(parentPath, normalizedPath,
+                indexProvider.get(parentPath, readLinkTarget).tryGetLink(name), idCache);
+    }
+
+    private LinkRecord tryGetLink(String path, boolean readLinkTargets)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        return indexProvider.get(parentPath, readLinkTargets).tryGetLink(name);
+    }
+
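+    /**
+     * Follows a chain of symbolic links until a non-link entry is reached. Returns
+     * <code>null</code> if the chain contains a loop or a link target that cannot be resolved.
+     * The set of visited paths is only allocated from the second hop on, keeping the common
+     * single-hop case cheap.
+     */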
+    @Override
+    public ArchiveEntry tryResolveLink(ArchiveEntry entry)
+    {
+        if (entry == null)
+        {
+            return null;
+        }
+        ArchiveEntry workEntry = entry;
+        String firstPath = null;
+        if (entry.isSymLink())
+        {
+            Set<String> workPathSet = null;
+            while (workEntry != null && workEntry.isSymLink())
+            {
+                if (firstPath == null)
+                {
+                    firstPath = workEntry.getPath();
+                } else
+                {
+                    if (workPathSet == null)
+                    {
+                        workPathSet = new HashSet<String>();
+                        workPathSet.add(firstPath);
+                    }
+                    if (workPathSet.contains(workEntry.getPath()))
+                    {
+                        // The link targets form a loop, resolve to null.
+                        return null;
+                    }
+                    workPathSet.add(workEntry.getPath());
+                }
+                String linkTarget;
+                if (workEntry.hasLinkTarget())
+                {
+                    linkTarget = workEntry.getLinkTarget();
+                } else
+                {
+                    workEntry = tryGetEntry(workEntry.getPath(), true);
+                    linkTarget = workEntry.getLinkTarget();
+                }
+                if (linkTarget.startsWith("/") == false)
+                {
+                    linkTarget = Utils.concatLink(workEntry.getParentPath(), linkTarget);
+                }
+                linkTarget = Utils.normalizePath(linkTarget);
+                if (linkTarget == null) // impossible link target like '/..'
+                {
+                    return null;
+                }
+                workEntry = tryGetEntry(linkTarget, true);
+            }
+        }
+        return workEntry;
+    }
+
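+    /**
+     * Returns the entry for <var>path</var> with symbolic links resolved, or
+     * <code>null</code> if the link chain cannot be resolved; if <var>keepPath</var> is
+     * set, the resolved entry is reported under the path of the original entry.
+     */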
+    @Override
+    public ArchiveEntry tryGetResolvedEntry(String path, boolean keepPath)
+    {
+        final ArchiveEntry entry = tryGetEntry(path, true);
+        ArchiveEntry resolvedEntry = tryResolveLink(entry);
+        if (resolvedEntry == null)
+        {
+            return null;
+        }
+        if (entry != resolvedEntry && keepPath)
+        {
+            resolvedEntry = new ArchiveEntry(entry, resolvedEntry);
+        }
+        return resolvedEntry;
+    }
+
+    private static boolean isRegularFile(LinkRecord linkOrNull)
+    {
+        return linkOrNull != null && linkOrNull.isRegularFile();
+    }
+
+    private static boolean isSymLink(LinkRecord linkOrNull)
+    {
+        return linkOrNull != null && linkOrNull.isSymLink();
+    }
+
+    //
+    // IHDF5ArchiveReader
+    //
+
+    @Override
+    public List<ArchiveEntry> list()
+    {
+        return list("/", ListParameters.DEFAULT);
+    }
+
+    @Override
+    public List<ArchiveEntry> list(final String fileOrDir)
+    {
+        return list(fileOrDir, ListParameters.DEFAULT);
+    }
+
+    @Override
+    public List<ArchiveEntry> list(final String fileOrDir, final ListParameters params)
+    {
+        final List<ArchiveEntry> result = new ArrayList<ArchiveEntry>(100);
+        list(fileOrDir, new IArchiveEntryVisitor()
+            {
+                @Override
+                public void visit(ArchiveEntry entry)
+                {
+                    result.add(entry);
+                }
+            }, params);
+        return result;
+    }
+
+    @Override
+    public List<ArchiveEntry> test()
+    {
+        final List<ArchiveEntry> result = new ArrayList<ArchiveEntry>(100);
+        list("/", new IArchiveEntryVisitor()
+            {
+                @Override
+                public void visit(ArchiveEntry entry)
+                {
+                    if (entry.isOK() == false)
+                    {
+                        result.add(entry);
+                    }
+                }
+            }, ListParameters.TEST);
+        return result;
+    }
+
+    @Override
+    public IHDF5Archiver list(String fileOrDir, IArchiveEntryVisitor visitor)
+    {
+        return list(fileOrDir, visitor, ListParameters.DEFAULT);
+    }
+
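+    /**
+     * Core listing implementation: decorates the caller's visitor so that the list
+     * parameters (top-level entry inclusion, symbolic link resolution, directory entry
+     * suppression) are honored, then delegates the traversal to the archive entry
+     * processor.
+     */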
+    @Override
+    public IHDF5Archiver list(final String fileOrDir, final IArchiveEntryVisitor visitor,
+            final ListParameters params)
+    {
+        final String normalizedPath = Utils.normalizePath(fileOrDir);
+        final IArchiveEntryVisitor decoratedVisitor = new IArchiveEntryVisitor()
+            {
+                @Override
+                public void visit(ArchiveEntry entry)
+                {
+                    if (params.isIncludeTopLevelDirectoryEntry() == false
+                            && normalizedPath.equals(entry.getPath()))
+                    {
+                        return;
+                    }
+                    ArchiveEntry workEntry = entry;
+                    if (workEntry.isSymLink() && params.isResolveSymbolicLinks())
+                    {
+                        workEntry = tryResolveLink(workEntry);
+                        if (workEntry == null)
+                        {
+                            return;
+                        }
+                        if (workEntry != entry)
+                        {
+                            workEntry = new ArchiveEntry(entry, workEntry);
+                        }
+                    }
+                    if (params.isSuppressDirectoryEntries() == false
+                            || workEntry.isDirectory() == false)
+                    {
+                        visitor.visit(workEntry);
+                    }
+                }
+            };
+        final ArchiveEntryListProcessor listProcessor =
+                new ArchiveEntryListProcessor(decoratedVisitor, buffer, params.isTestArchive());
+        processor.process(normalizedPath, params.isRecursive(), params.isReadLinkTargets(),
+                params.isFollowSymbolicLinks(), listProcessor);
+        return this;
+    }
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(File rootDirectoryOnFS)
+    {
+        return verifyAgainstFilesystem("/", rootDirectoryOnFS, VerifyParameters.DEFAULT);
+    }
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS)
+    {
+        return verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, VerifyParameters.DEFAULT);
+    }
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor)
+    {
+        return verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, visitor,
+                VerifyParameters.DEFAULT);
+    }
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            VerifyParameters params)
+    {
+        final List<ArchiveEntry> verifyErrors = new ArrayList<ArchiveEntry>();
+        verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, new IArchiveEntryVisitor()
+            {
+                @Override
+                public void visit(ArchiveEntry entry)
+                {
+                    if (entry.isOK() == false)
+                    {
+                        verifyErrors.add(entry);
+                    }
+                }
+            }, params);
+        return verifyErrors;
+    }
+
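+    /**
+     * Core verify implementation: if a missing-archive-entry visitor is given, all files
+     * below <var>rootDirectoryOnFS</var> are collected up-front, and every file still left
+     * in that set after processing is reported as missing from the archive.
+     */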
+    @SuppressWarnings("null")
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, IArchiveEntryVisitor missingArchiveEntryVisitorOrNull,
+            VerifyParameters params)
+    {
+        final Set<File> filesOnFSOrNull =
+                (missingArchiveEntryVisitorOrNull != null) ? getFiles(rootDirectoryOnFS,
+                        params.isRecursive()) : null;
+        final ArchiveEntryVerifyProcessor verifyProcessor =
+                new ArchiveEntryVerifyProcessor(visitor, rootDirectoryOnFS, filesOnFSOrNull,
+                        buffer, params.isVerifyAttributes(), params.isNumeric());
+        processor.process(fileOrDir, params.isRecursive(), true, false, verifyProcessor);
+        if (filesOnFSOrNull != null && filesOnFSOrNull.isEmpty() == false)
+        {
+            for (File f : filesOnFSOrNull)
+            {
+                missingArchiveEntryVisitorOrNull.visit(new ArchiveEntry(HDF5ArchiveUpdater
+                        .getRelativePath(rootDirectoryOnFS, f.getParentFile()), HDF5ArchiveUpdater
+                        .getRelativePath(rootDirectoryOnFS, f), LinkRecord.getLinkRecordForLink(f),
+                        idCache));
+            }
+        }
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, VerifyParameters params)
+    {
+        return verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, visitor, null, params);
+    }
+
+    @SuppressWarnings("null")
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, IArchiveEntryVisitor visitor,
+            IArchiveEntryVisitor missingArchiveEntryVisitorOrNull, VerifyParameters params)
+    {
+        final Set<File> filesOnFSOrNull =
+                (missingArchiveEntryVisitorOrNull != null) ? getFiles(rootDirectoryOnFS,
+                        params.isRecursive()) : null;
+        final ArchiveEntryVerifyProcessor verifyProcessor =
+                new ArchiveEntryVerifyProcessor(visitor, rootDirectoryOnFS, filesOnFSOrNull,
+                        rootDirectoryInArchive, buffer, params.isVerifyAttributes(),
+                        params.isNumeric());
+        processor.process(fileOrDir, params.isRecursive(), true, false, verifyProcessor);
+        if (filesOnFSOrNull != null && filesOnFSOrNull.isEmpty() == false)
+        {
+            for (File f : filesOnFSOrNull)
+            {
+                missingArchiveEntryVisitorOrNull.visit(new ArchiveEntry(f.getParent(), f.getPath(),
+                        LinkRecord.getLinkRecordForLink(f), idCache));
+            }
+        }
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, IArchiveEntryVisitor visitor, VerifyParameters params)
+    {
+        return verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, rootDirectoryInArchive,
+                visitor, null, params);
+    }
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, IArchiveEntryVisitor missingArchiveEntryVisitor)
+    {
+        return verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, visitor,
+                missingArchiveEntryVisitor, VerifyParameters.DEFAULT);
+    }
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, VerifyParameters params)
+    {
+        final List<ArchiveEntry> verifyErrors = new ArrayList<ArchiveEntry>();
+        verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, rootDirectoryInArchive,
+                new IArchiveEntryVisitor()
+                    {
+                        @Override
+                        public void visit(ArchiveEntry entry)
+                        {
+                            if (entry.isOK() == false)
+                            {
+                                verifyErrors.add(entry);
+                            }
+                        }
+                    }, params);
+        return verifyErrors;
+    }
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive)
+    {
+        return verifyAgainstFilesystem(fileOrDir, rootDirectoryOnFS, rootDirectoryInArchive,
+                VerifyParameters.DEFAULT);
+    }
+
+    private static Set<File> getFiles(File fsRoot, boolean recursive)
+    {
+        final Set<File> result = new HashSet<File>();
+        if (recursive && fsRoot.isDirectory())
+        {
+            addFilesInDir(fsRoot, result);
+        }
+        return result;
+    }
+
+    private static void addFilesInDir(File dir, Set<File> files)
+    {
+        final File[] entries = dir.listFiles();
+        if (entries == null) // dir is not readable or not a directory
+        {
+            return;
+        }
+        for (File f : entries)
+        {
+            files.add(f);
+            if (f.isDirectory())
+            {
+                addFilesInDir(f, files);
+            }
+        }
+    }
+
+    @Override
+    public IHDF5Archiver extractFile(String path, OutputStream out) throws IOExceptionUnchecked
+    {
+        if (hdf5Reader.object().isDataSet(path) == false)
+        {
+            errorStrategy.dealWithError(new UnarchivingException(path, "not found in archive"));
+            return this;
+        }
+        try
+        {
+            for (HDF5DataBlock<byte[]> block : hdf5Reader.opaque().getArrayNaturalBlocks(path))
+            {
+                out.write(block.getData());
+            }
+        } catch (IOException ex)
+        {
+            errorStrategy.dealWithError(new UnarchivingException(new File("stdout"), ex));
+        }
+        return this;
+    }
+
+    @Override
+    public byte[] extractFileAsByteArray(String path) throws IOExceptionUnchecked
+    {
+        final ByteArrayOutputStream out = new ByteArrayOutputStream();
+        extractFile(path, out);
+        return out.toByteArray();
+    }
+
+    @Override
+    public IInputStream extractFileAsIInputStream(String path)
+    {
+        if (hdf5Reader.object().isDataSet(path) == false)
+        {
+            errorStrategy.dealWithError(new UnarchivingException(path, "not found in archive"));
+            return null;
+        }
+        return HDF5IOAdapterFactory.asIInputStream(hdf5Reader, path);
+    }
+
+    @Override
+    public InputStream extractFileAsInputStream(String path)
+    {
+        return new AdapterIInputStreamToInputStream(extractFileAsIInputStream(path));
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory) throws IllegalStateException
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, "", "/", ArchivingStrategy.DEFAULT,
+                null);
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory, String path)
+            throws IllegalStateException
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, "", path,
+                ArchivingStrategy.DEFAULT, null);
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory, String path,
+            IArchiveEntryVisitor visitorOrNull) throws IllegalStateException
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, "", path,
+                ArchivingStrategy.DEFAULT, visitorOrNull);
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory, String path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor visitorOrNull)
+            throws IllegalStateException
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, "", path, strategy, visitorOrNull);
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive)
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, rootPathInArchive, "",
+                ArchivingStrategy.DEFAULT, null);
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive, IArchiveEntryVisitor visitorOrNull)
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, rootPathInArchive, "",
+                ArchivingStrategy.DEFAULT, visitorOrNull);
+    }
+
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive, ArchivingStrategy strategy, IArchiveEntryVisitor visitorOrNull)
+    {
+        return extractToFilesystemBelowDirectory(rootDirectory, rootPathInArchive, "", strategy,
+                visitorOrNull);
+    }
+
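+    /**
+     * Core extract implementation: all other <code>extractToFilesystem*</code> methods
+     * delegate here.
+     */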
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive, String path, ArchivingStrategy strategy,
+            IArchiveEntryVisitor visitorOrNull) throws IllegalStateException
+    {
+        final IArchiveEntryProcessor extractor =
+                new ArchiveEntryExtractProcessor(visitorOrNull, strategy, rootDirectory,
+                        rootPathInArchive, buffer);
+        processor.process(Utils.concatLink(rootPathInArchive, path), true, true, false, extractor);
+        return this;
+    }
+
+    //
+    // IHDF5Archiver
+    //
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File path) throws IllegalStateException
+    {
+        return archiveFromFilesystem(path, ArchivingStrategy.DEFAULT, false,
+                (IArchiveEntryVisitor) null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File path, ArchivingStrategy strategy)
+            throws IllegalStateException
+    {
+        return archiveFromFilesystem(path, strategy, false, (IArchiveEntryVisitor) null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File path, IArchiveEntryVisitor entryVisitorOrNull)
+            throws IllegalStateException
+    {
+        return archiveFromFilesystem(path, ArchivingStrategy.DEFAULT, false, entryVisitorOrNull);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File path, ArchivingStrategy strategy,
+            boolean keepNameFromPath, IArchiveEntryVisitor entryVisitorOrNull)
+            throws IllegalStateException
+    {
+        checkReadWrite();
+        updaterOrNull
+                .archive(path, strategy, CHUNK_SIZE_AUTO, keepNameFromPath, entryVisitorOrNull);
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File parentDirToStrip, File path)
+            throws IllegalStateException
+    {
+        return archiveFromFilesystem(parentDirToStrip, path, ArchivingStrategy.DEFAULT);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File parentDirToStrip, File path,
+            ArchivingStrategy strategy) throws IllegalStateException
+    {
+        return archiveFromFilesystem(parentDirToStrip, path, strategy, null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(File parentDirToStrip, File path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor entryVisitorOrNull)
+            throws IllegalStateException
+    {
+        checkReadWrite();
+        updaterOrNull
+                .archive(parentDirToStrip, path, strategy, CHUNK_SIZE_AUTO, entryVisitorOrNull);
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(String rootInArchive, File path)
+    {
+        return archiveFromFilesystem(rootInArchive, path, ArchivingStrategy.DEFAULT, null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(String rootInArchive, File path,
+            ArchivingStrategy strategy)
+    {
+        return archiveFromFilesystem(rootInArchive, path, strategy, null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystem(String rootInArchive, File path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        checkReadWrite();
+        updaterOrNull.archive(rootInArchive, path, strategy, CHUNK_SIZE_AUTO, entryVisitorOrNull);
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory)
+    {
+        return archiveFromFilesystemBelowDirectory(rootInArchive, directory,
+                ArchivingStrategy.DEFAULT, null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory,
+            ArchivingStrategy strategy)
+    {
+        return archiveFromFilesystemBelowDirectory(rootInArchive, directory, strategy, null);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory,
+            IArchiveEntryVisitor visitor)
+    {
+        return archiveFromFilesystemBelowDirectory(rootInArchive, directory,
+                ArchivingStrategy.DEFAULT, visitor);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory,
+            ArchivingStrategy strategy, IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        checkReadWrite();
+        updaterOrNull.archiveBelow(rootInArchive, directory, strategy, CHUNK_SIZE_AUTO,
+                entryVisitorOrNull);
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver archiveFile(String path, byte[] data) throws IllegalStateException
+    {
+        return archiveFile(NewArchiveEntry.file(path), new ByteArrayInputStream(data));
+    }
+
+    @Override
+    public IHDF5Archiver archiveFile(String path, InputStream input)
+    {
+        return archiveFile(NewArchiveEntry.file(path), input);
+    }
+
+    @Override
+    public IHDF5Archiver archiveFile(NewFileArchiveEntry entry, byte[] data)
+    {
+        return archiveFile(entry, new ByteArrayInputStream(data));
+    }
+
+    @Override
+    public OutputStream archiveFileAsOutputStream(NewFileArchiveEntry entry)
+    {
+        return new AdapterIOutputStreamToOutputStream(archiveFileAsIOutputStream(entry));
+    }
+
+    @Override
+    public IOutputStream archiveFileAsIOutputStream(NewFileArchiveEntry entry)
+    {
+        checkReadWrite();
+        final LinkRecord link = new LinkRecord(entry);
+        final IOutputStream stream =
+                updaterOrNull.archiveFile(entry.getParentPath(), link, entry.isCompress(),
+                        entry.getChunkSize());
+        return stream;
+    }
+
+    @Override
+    public IHDF5Archiver archiveFile(NewFileArchiveEntry entry, InputStream input)
+    {
+        checkReadWrite();
+        final LinkRecord link = new LinkRecord(entry);
+        updaterOrNull.archive(entry.getParentPath(), link, input, entry.isCompress(),
+                entry.getChunkSize());
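+        // Propagate the CRC32 checksum computed during archiving back to the caller's entry.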
+        entry.setCrc32(link.getCrc32());
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver archiveSymlink(String path, String linkTarget)
+    {
+        return archiveSymlink(NewArchiveEntry.symlink(path, linkTarget));
+    }
+
+    @Override
+    public IHDF5Archiver archiveSymlink(NewSymLinkArchiveEntry entry)
+    {
+        checkReadWrite();
+        final LinkRecord link = new LinkRecord(entry);
+        updaterOrNull.archive(entry.getParentPath(), link, null, false, CHUNK_SIZE_AUTO);
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver archiveDirectory(String path)
+    {
+        return archiveDirectory(NewArchiveEntry.directory(path));
+    }
+
+    @Override
+    public IHDF5Archiver archiveDirectory(NewDirectoryArchiveEntry entry)
+            throws IllegalStateException, IllegalArgumentException
+    {
+        checkReadWrite();
+        final LinkRecord link = new LinkRecord(entry);
+        updaterOrNull.archive(entry.getParentPath(), link, null, false, CHUNK_SIZE_AUTO);
+        return this;
+    }
+
+    @Override
+    public IHDF5Archiver delete(String hdf5ObjectPath)
+    {
+        return delete(Collections.singletonList(hdf5ObjectPath), null);
+    }
+
+    @Override
+    public IHDF5Archiver delete(List<String> hdf5ObjectPaths)
+    {
+        return delete(hdf5ObjectPaths, null);
+    }
+
+    @Override
+    public IHDF5Archiver delete(List<String> hdf5ObjectPaths,
+            IArchiveEntryVisitor entryVisitorOrNull)
+    {
+        checkReadWrite();
+        deleterOrNull.delete(hdf5ObjectPaths, entryVisitorOrNull);
+        return this;
+    }
+
+    private void checkReadWrite()
+    {
+        if (updaterOrNull == null)
+        {
+            throw new IllegalStateException("Cannot update archive in read-only mode.");
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverFactory.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverFactory.java
new file mode 100644
index 0000000..540c797
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverFactory.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * A factory for {@link IHDF5Archiver}.
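+ * <p>
+ * A minimal usage sketch (illustrative only; assumes a writable file path and omits error
+ * handling):
+ * 
+ * <pre>
+ * IHDF5Archiver archiver = HDF5ArchiverFactory.open(new File("example.h5ar"));
+ * try
+ * {
+ *     archiver.archiveFromFilesystem(new File("data"));
+ * } finally
+ * {
+ *     archiver.close();
+ * }
+ * </pre>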
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5ArchiverFactory
+{
+    /**
+     * Opens an HDF5 archive <var>file</var> for writing and reading.
+     * 
+     * @param file The archive file to open. If the archive file does not yet exist, it will be
+     *            created.
+     */
+    public static IHDF5Archiver open(File file)
+    {
+        return new HDF5Archiver(file, false);
+    }
+
+    /**
+     * Opens an HDF5 archive <var>file</var> for writing and reading.
+     * 
+     * @param file The archive file to open. If the archive file does not yet exist, it will be
+     *            created.
+     * @param errorStrategyOrNull The {@link IErrorStrategy} to use on errors when accessing the
+     *            archive. May be <code>null</code>, in which case every error just causes an
+     *            exception.
+     */
+    public static IHDF5Archiver open(File file, IErrorStrategy errorStrategyOrNull)
+    {
+        return new HDF5Archiver(file, false, false, FileFormat.STRICTLY_1_6, errorStrategyOrNull);
+    }
+
+    /**
+     * Opens an HDF5 archive <var>file</var> for writing and reading.
+     * 
+     * @param file The archive file to open. If the archive file does not yet exist, it will be
+     *            created.
+     * @param noSync if <code>true</code>, no <code>sync</code> call will be performed on closing
+     *            the file.
+     * @param fileFormat The HDF5 file format to use for the archive.
+     * @param errorStrategyOrNull The {@link IErrorStrategy} to use on errors when accessing the
+     *            archive. May be <code>null</code>, in which case every error just causes an
+     *            exception.
+     */
+    public static IHDF5Archiver open(File file, boolean noSync, FileFormat fileFormat,
+            IErrorStrategy errorStrategyOrNull)
+    {
+        return new HDF5Archiver(file, false, noSync, fileFormat, errorStrategyOrNull);
+    }
+
+    /**
+     * Opens an HDF5 archive file named <var>filePath</var> for writing and reading.
+     * 
+     * @param filePath The path of the archive file to open. If the archive file does not yet exist,
+     *            it will be created.
+     */
+    public static IHDF5Archiver open(String filePath)
+    {
+        return new HDF5Archiver(new File(filePath), false);
+    }
+
+    /**
+     * Opens an HDF5 archive file named <var>filePath</var> for writing and reading.
+     * 
+     * @param filePath The path of the archive file to open. If the archive file does not yet exist,
+     *            it will be created.
+     * @param errorStrategyOrNull The {@link IErrorStrategy} to use on errors when accessing the
+     *            archive. May be <code>null</code>, in which case every error just causes an
+     *            exception.
+     */
+    public static IHDF5Archiver open(String filePath, IErrorStrategy errorStrategyOrNull)
+    {
+        return new HDF5Archiver(new File(filePath), false, false, FileFormat.STRICTLY_1_6,
+                errorStrategyOrNull);
+    }
+
+    /**
+     * Opens an HDF5 archive file named <var>filePath</var> for writing and reading.
+     * 
+     * @param filePath The path of the archive file to open. If the archive file does not yet exist,
+     *            it will be created.
+     * @param noSync if <code>true</code>, no <code>sync</code> call will be performed on closing
+     *            the file.
+     * @param fileFormat The HDF5 file format to use for the archive.
+     * @param errorStrategyOrNull The {@link IErrorStrategy} to use on errors when accessing the
+     *            archive. May be <code>null</code>, in which case every error just causes an
+     *            exception.
+     */
+    public static IHDF5Archiver open(String filePath, boolean noSync, FileFormat fileFormat,
+            IErrorStrategy errorStrategyOrNull)
+    {
+        return new HDF5Archiver(new File(filePath), false, noSync, fileFormat, errorStrategyOrNull);
+    }
+
+    /**
+     * Opens an HDF5 archive file based on an HDF5 writer for writing and reading.
+     * 
+     * @param writer The HDF5 writer to base the archive file on. Closing the archive writer will
+     *            not close the HDF5 <var>writer</var>.
+     */
+    public static IHDF5Archiver open(IHDF5Writer writer)
+    {
+        return new HDF5Archiver(writer, false, null);
+    }
+
+    /**
+     * Opens an HDF5 archive file based on an HDF5 writer for writing and reading.
+     * 
+     * @param writer The HDF5 writer to base the archive file on. Closing the archive writer will
+     *            not close the HDF5 <var>writer</var>. It is recommended that you configure the
+     *            writer with <code>IHDF5WriterConfigurator.houseKeepingNameSuffix("\\1\\0")</code>
+     *            so that internal house-keeping files cannot overwrite archived files.
+     * @param errorStrategy The {@link IErrorStrategy} to use on errors when accessing the archive.
+     */
+    public static IHDF5Archiver open(IHDF5Writer writer, IErrorStrategy errorStrategy)
+    {
+        return new HDF5Archiver(writer, false, errorStrategy);
+    }
+
+    /**
+     * Opens an HDF5 archive <var>file</var> for reading.
+     * 
+     * @param file The archive file to open. It is an error if the archive file does not exist.
+     */
+    public static IHDF5ArchiveReader openForReading(File file)
+    {
+        return new HDF5Archiver(file, true);
+    }
+
+    /**
+     * Opens an HDF5 archive <var>file</var> for reading.
+     * 
+     * @param file The archive file to open. It is an error if the archive file does not exist.
+     * @param errorStrategy The {@link IErrorStrategy} to use on errors when accessing the archive.
+     */
+    public static IHDF5ArchiveReader openForReading(File file, IErrorStrategy errorStrategy)
+    {
+        return new HDF5Archiver(file, true, true, FileFormat.STRICTLY_1_6, errorStrategy);
+    }
+
+    /**
+     * Opens an HDF5 archive file named <var>filePath</var> for reading.
+     * 
+     * @param filePath The path of the archive file to open. It is an error if the archive file does
+     *            not exist.
+     */
+    public static IHDF5ArchiveReader openForReading(String filePath)
+    {
+        return new HDF5Archiver(new File(filePath), true);
+    }
+
+    /**
+     * Opens an HDF5 archive file named <var>filePath</var> for reading.
+     * 
+     * @param filePath The path of the archive file to open. It is an error if the archive file does
+     *            not exist.
+     * @param errorStrategy The {@link IErrorStrategy} to use on errors when accessing the archive.
+     */
+    public static IHDF5ArchiveReader openForReading(String filePath, IErrorStrategy errorStrategy)
+    {
+        return new HDF5Archiver(new File(filePath), true, true, FileFormat.STRICTLY_1_6,
+                errorStrategy);
+    }
+
+    /**
+     * Opens an HDF5 archive file based on an HDF5 reader.
+     * 
+     * @param reader The HDF5 reader to use as the source of the archive. Closing the archive reader
+     *            will not close the HDF5 <var>reader</var>.
+     * @param errorStrategy The {@link IErrorStrategy} to use on errors when accessing the archive.
+     */
+    public static IHDF5ArchiveReader openForReading(IHDF5Reader reader, IErrorStrategy errorStrategy)
+    {
+        return new HDF5Archiver(reader, true, errorStrategy);
+    }
+
+    /**
+     * Opens an HDF5 archive file based on an HDF5 reader.
+     * 
+     * @param reader The HDF5 reader to use as the source of the archive. Closing the archive reader
+     *            will not close the HDF5 <var>reader</var>.
+     */
+    public static IHDF5ArchiveReader openForReading(IHDF5Reader reader)
+    {
+        return new HDF5Archiver(reader, true, null);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverMain.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverMain.java
new file mode 100644
index 0000000..d961f23
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverMain.java
@@ -0,0 +1,634 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.apache.commons.io.FilenameUtils;
+
+import ch.systemsx.cisd.args4j.Argument;
+import ch.systemsx.cisd.args4j.CmdLineException;
+import ch.systemsx.cisd.args4j.CmdLineParser;
+import ch.systemsx.cisd.args4j.ExampleMode;
+import ch.systemsx.cisd.args4j.Option;
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.hdf5.BuildAndEnvironmentInfo;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+
+/**
+ * The main class of the HDF5-based archiver.
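+ * <p>
+ * Typical invocation (illustrative): <code>h5ar ARCHIVE archive.h5ar mydir</code>, which
+ * adds the directory <code>mydir</code> to the archive <code>archive.h5ar</code>.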
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5ArchiverMain
+{
+
+    private static final String FILE_EXTENSION_H5 = ".h5";
+
+    private static final String FILE_EXTENSION_H5AR = ".h5ar";
+
+    private enum Command
+    {
+        ARCHIVE(new String[]
+            { "A", "AR", "ARCHIVE" }, false), CAT(new String[]
+            { "C", "CT", "CAT" }, true), EXTRACT(new String[]
+            { "E", "EX", "EXTRACT" }, true), DELETE(new String[]
+            { "D", "RM", "DELETE", "REMOVE" }, false), LIST(new String[]
+            { "L", "LS", "LIST" }, true), VERIFY(new String[]
+            { "V", "VF", "VERIFY" }, true), HELP(new String[]
+            { "H", "HELP" }, true);
+
+        String[] forms;
+
+        boolean readOnly;
+
+        Command(String[] forms, boolean readOnly)
+        {
+            this.forms = forms;
+            this.readOnly = readOnly;
+        }
+
+        boolean isReadOnly()
+        {
+            return readOnly;
+        }
+
+        static Command parse(String commandStr)
+        {
+            final String commandStrU = commandStr.toUpperCase();
+            for (Command cmd : values())
+            {
+                for (String frm : cmd.forms)
+                {
+                    if (frm.equals(commandStrU))
+                    {
+                        return cmd;
+                    }
+                }
+            }
+            return HELP;
+        }
+    }
+
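+    /** An error strategy that prints errors and warnings to stderr and continues. */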
+    private static final IErrorStrategy ERROR_STRATEGY_CONTINUE = new IErrorStrategy()
+        {
+            @Override
+            public void dealWithError(Throwable th) throws ArchiverException
+            {
+                System.err.println(th.getMessage());
+            }
+
+            @Override
+            public void warning(String message)
+            {
+                System.err.println(message);
+            }
+        };
+
+    @Argument
+    private List<String> arguments;
+
+    private Command command;
+
+    private File archiveFile;
+
+    private final boolean initializationOK;
+
+    @Option(name = "i", longName = "include", metaVar = "REGEX", skipForExample = true, usage = "Regex of files to include")
+    private List<String> fileWhiteList = new ArrayList<String>();
+
+    @Option(name = "e", longName = "exclude", metaVar = "REGEX", usage = "Regex of files to exclude")
+    private List<String> fileBlackList = new ArrayList<String>();
+
+    @Option(name = "I", longName = "include-dirs", metaVar = "REGEX", skipForExample = true, usage = "Regex of directories to include")
+    private List<String> dirWhiteList = new ArrayList<String>();
+
+    @Option(name = "E", longName = "exclude-dirs", metaVar = "REGEX", skipForExample = true, usage = "Regex of directories to exclude")
+    private List<String> dirBlackList = new ArrayList<String>();
+
+    @Option(name = "c", longName = "compress", metaVar = "REGEX", skipForExample = true, usage = "Regex of files to compress")
+    private List<String> compressionWhiteList = new ArrayList<String>();
+
+    @Option(name = "nc", longName = "no-compression", metaVar = "REGEX", skipForExample = true, usage = "Regex of files not to compress")
+    private List<String> compressionBlackList = new ArrayList<String>();
+
+    @Option(name = "C", longName = "compress-all", usage = "Compress all files")
+    private Boolean compressAll = null;
+
+    @Option(name = "r", longName = "root-dir", metaVar = "DIR", usage = "Root directory for archiving / extracting / verifying")
+    private File rootOrNull;
+
+    @Option(name = "D", longName = "suppress-directories", usage = "Supress output for directories itself for LIST and VERIFY")
+    private boolean suppressDirectoryEntries = false;
+
+    @Option(name = "R", longName = "recursive", usage = "Recursive LIST and VERIFY")
+    private boolean recursive = false;
+
+    @Option(name = "v", longName = "verbose", usage = "Verbose output (all operations)")
+    private boolean verbose = false;
+
+    @Option(name = "q", longName = "quiet", usage = "Quiet operation (only error output)")
+    private boolean quiet = false;
+
+    @Option(name = "n", longName = "numeric", usage = "Use numeric values for mode, uid and gid for LIST and VERIFY")
+    private boolean numeric = false;
+
+    @Option(name = "t", longName = "test-checksums", usage = "Test CRC32 checksums of files in archive for LIST")
+    private boolean testAgainstChecksums = false;
+
+    @Option(name = "a", longName = "verify-attributes", usage = "Consider file attributes for VERIFY")
+    private boolean verifyAttributes = false;
+
+    @Option(name = "m", longName = "check-missing-files", usage = "Check for files present on the filesystem but missing from the archive for VERIFY")
+    private boolean checkMissingFile = false;
+
+    @Option(longName = "file-format", skipForExample = true, usage = "Specifies the file format version when creating an archive (N=1 -> HDF51.6 (default), N=2 -> HDF51.8)")
+    private int fileFormat = 1;
+
+    @Option(longName = "stop-on-error", skipForExample = true, usage = "Stop on first error and give detailed error report")
+    private boolean stopOnError = false;
+
+    @Option(longName = "no-sync", skipForExample = true, usage = "Do not sync to disk before program exits (write mode only)")
+    private boolean noSync = false;
+
+    private HDF5Archiver archiver;
+
+    /**
+     * The command line parser.
+     */
+    private final CmdLineParser parser = new CmdLineParser(this);
+
+    private HDF5ArchiverMain(String[] args)
+    {
+        try
+        {
+            parser.parseArgument(args);
+        } catch (CmdLineException ex)
+        {
+            System.err.printf("Error when parsing command line: '%s'\n", ex.getMessage());
+            printHelp(true);
+            initializationOK = false;
+            return;
+        }
+        if (arguments == null || arguments.size() < 2)
+        {
+            printHelp(true);
+            initializationOK = false;
+            return;
+        }
+        command = Command.parse(arguments.get(0));
+        if (command == null || command == Command.HELP)
+        {
+            printHelp(true);
+            initializationOK = false;
+            return;
+        }
+        if (arguments.get(1).endsWith(FILE_EXTENSION_H5)
+                || arguments.get(1).endsWith(FILE_EXTENSION_H5AR))
+        {
+            archiveFile = new File(arguments.get(1));
+        } else
+        {
+            archiveFile = new File(arguments.get(1) + FILE_EXTENSION_H5AR);
+            if (command.isReadOnly() && archiveFile.exists() == false)
+            {
+                archiveFile = new File(arguments.get(1) + FILE_EXTENSION_H5);
+                if (command.isReadOnly() && archiveFile.exists() == false)
+                {
+                    archiveFile = new File(arguments.get(1));
+                }
+            }
+        }
+        if (command.isReadOnly() && archiveFile.exists() == false)
+        {
+            System.err.println("Archive '" + archiveFile.getAbsolutePath() + "' does not exist.");
+            initializationOK = false;
+            return;
+        }
+        if (quiet && verbose)
+        {
+            System.err.println("Cannot be quiet and verbose at the same time.");
+            initializationOK = false;
+            return;
+        }
+        initializationOK = true;
+    }
+
+    @Option(longName = "version", skipForExample = true, usage = "Prints out the version information")
+    void printVersion(final boolean exit)
+    {
+        System.err.println("HDF5 archiver version "
+                + BuildAndEnvironmentInfo.INSTANCE.getFullVersion());
+        if (exit)
+        {
+            System.exit(0);
+        }
+    }
+
+    private boolean helpPrinted = false;
+
+    @Option(longName = "help", skipForExample = true, usage = "Shows this help text")
+    void printHelp(final boolean dummy)
+    {
+        if (helpPrinted)
+        {
+            return;
+        }
+        parser.printHelp("h5ar", "[option [...]]",
+                "[ARCHIVE <archive_file> <item-to-archive> [...] | "
+                        + "CAT <archive_file> <item-to-cat> [...] | "
+                        + "EXTRACT <archive_file> [<item-to-unarchive> [...]] | "
+                        + "DELETE <archive_file> <item-to-delete> [...] | "
+                        + "LIST <archive_file> | VERIFY <archive_file>]", ExampleMode.NONE);
+        System.err.println("ARCHIVE: add files on the file system to an archive");
+        System.err.println("CAT: extract files from an archive to stdout");
+        System.err.println("EXTRACT: extract files from an archive to the file system");
+        System.err.println("DELETE: delete files from an archive");
+        System.err.println("LIST: list files in an archive");
+        System.err
+                .println("VERIFY: verify the existence and integrity of files on the file system vs. the content of an archive");
+        System.err
+                .println("Command aliases: ARCHIVE: A, AR; CAT: C, CT; EXTRACT: E, EX; DELETE: D, REMOVE, RM; LIST: L, LS; VERIFY: V, VF");
+        System.err.println("Example: h5ar" + parser.printExample(ExampleMode.ALL)
+                + " ARCHIVE archive.h5ar .");
+        helpPrinted = true;
+    }
+
+    private boolean createArchiver()
+    {
+        final FileFormat fileFormatEnum =
+                (fileFormat == 1) ? FileFormat.STRICTLY_1_6 : FileFormat.STRICTLY_1_8;
+        try
+        {
+            archiver =
+                    new HDF5Archiver(archiveFile, command.isReadOnly(), noSync, fileFormatEnum,
+                            stopOnError ? IErrorStrategy.DEFAULT_ERROR_STRATEGY
+                                    : ERROR_STRATEGY_CONTINUE);
+        } catch (HDF5JavaException ex)
+        {
+            // Problem opening the archive file: non readable / writable
+            System.err.println("Error opening archive file: " + ex.getMessage());
+            return false;
+        } catch (HDF5LibraryException ex)
+        {
+            // Problem opening the archive file: corrupt file
+            System.err.println("Error opening archive file: corrupt file ["
+                    + ex.getClass().getSimpleName() + ": " + ex.getMessage() + "]");
+            return false;
+        }
+        return true;
+    }
+
+    private ArchivingStrategy createArchivingStrategy()
+    {
+        final ArchivingStrategy strategy =
+                new ArchivingStrategy(compressionBlackList.isEmpty() ? ArchivingStrategy.DEFAULT
+                        : ArchivingStrategy.DEFAULT_NO_COMPRESSION);
+        if (compressAll != null)
+        {
+            strategy.compressAll(compressAll);
+        }
+        for (String pattern : fileWhiteList)
+        {
+            strategy.addToFileWhiteList(pattern);
+        }
+        for (String pattern : fileBlackList)
+        {
+            strategy.addToFileBlackList(pattern);
+        }
+        for (String pattern : dirWhiteList)
+        {
+            strategy.addToDirWhiteList(pattern);
+        }
+        for (String pattern : dirBlackList)
+        {
+            strategy.addToDirBlackList(pattern);
+        }
+        for (String pattern : compressionWhiteList)
+        {
+            strategy.addToCompressionWhiteList(pattern);
+        }
+        for (String pattern : compressionBlackList)
+        {
+            strategy.addToCompressionBlackList(pattern);
+        }
+        return strategy;
+    }
+
+    private File getFSRoot()
+    {
+        return (rootOrNull == null) ? new File(".") : rootOrNull;
+    }
+
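+    /**
+     * A visitor that prints each archive entry to stdout and, when verifying, additionally
+     * reports checksum failures to stderr and counts them for the final status.
+     */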
+    private static class ListingVisitor implements IArchiveEntryVisitor
+    {
+        private final boolean verifying;
+
+        private final boolean quiet;
+
+        private final boolean verbose;
+
+        private final boolean numeric;
+
+        private final boolean suppressDirectoryEntries;
+
+        private int checkSumFailures;
+
+        ListingVisitor(boolean verifying, boolean quiet, boolean verbose, boolean numeric)
+        {
+            this(verifying, quiet, verbose, numeric, false);
+        }
+
+        ListingVisitor(boolean verifying, boolean quiet, boolean verbose, boolean numeric,
+                boolean suppressDirectoryEntries)
+        {
+            this.verifying = verifying;
+            this.quiet = quiet;
+            this.verbose = verbose;
+            this.numeric = numeric;
+            this.suppressDirectoryEntries = suppressDirectoryEntries;
+        }
+
+        @Override
+        public void visit(ArchiveEntry entry)
+        {
+            if (suppressDirectoryEntries && entry.isDirectory())
+            {
+                return;
+            }
+            if (verifying)
+            {
+                final boolean ok = entry.isOK();
+                if (quiet == false)
+                {
+                    System.out.println(entry.describeLink(verbose, numeric, true));
+                }
+                if (ok == false)
+                {
+                    System.err.println(entry.getStatus(true));
+                    ++checkSumFailures;
+                }
+            } else
+            {
+                if (quiet == false)
+                {
+                    System.out.println(entry.describeLink(verbose, numeric, false));
+                }
+            }
+        }
+
+        boolean isOK(int missingFiles)
+        {
+            if (verifying && (checkSumFailures + missingFiles > 0))
+            {
+                System.err.println(checkSumFailures + missingFiles + " file(s) failed the test.");
+                return false;
+            } else
+            {
+                return true;
+            }
+        }
+    }
+
+    boolean run()
+    {
+        if (initializationOK == false)
+        {
+            return false;
+        }
+        try
+        {
+            switch (command)
+            {
+                case ARCHIVE:
+                {
+                    if (arguments.size() == 2)
+                    {
+                        System.err.println("Nothing to archive.");
+                        break;
+                    }
+                    if (createArchiver() == false)
+                    {
+                        break;
+                    }
+                    final ArchivingStrategy strategy = createArchivingStrategy();
+                    if (verbose)
+                    {
+                        System.out.printf("Archiving to file '%s', file system root: '%s'\n",
+                                archiveFile, getFSRoot());
+                    }
+                    if (rootOrNull != null)
+                    {
+                        for (int i = 2; i < arguments.size(); ++i)
+                        {
+                            if (verbose)
+                            {
+                                System.out.printf("  Adding entry: '%s'\n", arguments.get(i));
+                            }
+                            archiver.archiveFromFilesystem(rootOrNull, new File(rootOrNull,
+                                    arguments.get(i)), strategy,
+                                    verbose ? IArchiveEntryVisitor.NONVERBOSE_VISITOR : null);
+                        }
+                    } else
+                    {
+                        for (int i = 2; i < arguments.size(); ++i)
+                        {
+                            if (verbose)
+                            {
+                                System.out.printf("  Adding entry: '%s'\n", arguments.get(i));
+                            }
+                            archiver.archiveFromFilesystem(new File(arguments.get(i)), strategy,
+                                    true, verbose ? IArchiveEntryVisitor.NONVERBOSE_VISITOR : null);
+                        }
+                    }
+                    break;
+                }
+                case CAT:
+                {
+                    if (createArchiver() == false)
+                    {
+                        break;
+                    }
+                    if (arguments.size() == 2)
+                    {
+                        System.err.println("Nothing to cat.");
+                        break;
+                    } else
+                    {
+                        for (int i = 2; i < arguments.size(); ++i)
+                        {
+                            archiver.extractFile(arguments.get(i), new FileOutputStream(
+                                    FileDescriptor.out));
+                        }
+                    }
+                    break;
+                }
+                case EXTRACT:
+                {
+                    if (createArchiver() == false)
+                    {
+                        break;
+                    }
+                    final ArchivingStrategy strategy = createArchivingStrategy();
+                    if (verbose)
+                    {
+                        System.out.printf("Extracting from file '%s', file system root: '%s'\n",
+                                archiveFile, getFSRoot());
+                    }
+                    if (arguments.size() == 2)
+                    {
+                        if (verbose)
+                        {
+                            System.out.println("  Extracting entry: '/'");
+                        }
+                        archiver.extractToFilesystem(getFSRoot(), "/", strategy,
+                                verbose ? IArchiveEntryVisitor.DEFAULT_VISITOR : quiet ? null
+                                        : IArchiveEntryVisitor.NONVERBOSE_VISITOR);
+                    } else
+                    {
+                        for (int i = 2; i < arguments.size(); ++i)
+                        {
+                            if (verbose)
+                            {
+                                System.out.printf("  Extracting entry: '%s'\n", arguments.get(i));
+                            }
+                            final String unixPath =
+                                    FilenameUtils.separatorsToUnix(arguments.get(i));
+                            archiver.extractToFilesystem(getFSRoot(), unixPath, strategy,
+                                    verbose ? IArchiveEntryVisitor.DEFAULT_VISITOR : quiet ? null
+                                            : IArchiveEntryVisitor.NONVERBOSE_VISITOR);
+                        }
+                    }
+                    break;
+                }
+                case DELETE:
+                {
+                    if (arguments.size() == 2)
+                    {
+                        System.err.println("Nothing to delete.");
+                        break;
+                    }
+                    if (createArchiver() == false)
+                    {
+                        break;
+                    }
+                    if (verbose)
+                    {
+                        System.out.printf("Deleting from file '%s'\n", archiveFile);
+                        for (String entry : arguments.subList(2, arguments.size()))
+                        {
+                            System.out.printf("  Deleting entry: '%s'\n", entry);
+                        }
+                    }
+                    archiver.delete(arguments.subList(2, arguments.size()),
+                            verbose ? IArchiveEntryVisitor.NONVERBOSE_VISITOR : null);
+                    break;
+                }
+                case VERIFY:
+                {
+                    if (createArchiver() == false)
+                    {
+                        break;
+                    }
+                    if (verbose)
+                    {
+                        System.out.printf("Verifying file '%s', file system root: '%s'\n",
+                                archiveFile, getFSRoot());
+                    }
+                    final String fileOrDir = (arguments.size() > 2) ? arguments.get(2) : "/";
+                    final AtomicInteger missingFileCount = new AtomicInteger();
+                    final IArchiveEntryVisitor missingFileVisitorOrNull =
+                            checkMissingFile ? new IArchiveEntryVisitor()
+                                {
+                                    @Override
+                                    public void visit(ArchiveEntry entry)
+                                    {
+                                        final String errMsg =
+                                                "ERROR: Object '" + entry.getName()
+                                                        + "' does not exist in archive.";
+                                        if (verbose)
+                                        {
+                                            System.out.println(entry.describeLink(true, false,
+                                                    false) + "\t" + errMsg);
+                                        } else if (quiet == false)
+                                        {
+                                            System.out.println(entry.getPath() + "\t" + errMsg);
+                                        }
+                                        System.err.println(errMsg);
+                                        missingFileCount.incrementAndGet();
+                                    }
+                                } : null;
+                    final ListingVisitor visitor =
+                            new ListingVisitor(true, quiet, verbose, numeric);
+                    archiver.verifyAgainstFilesystem(fileOrDir, getFSRoot(), visitor,
+                            missingFileVisitorOrNull, VerifyParameters.build().recursive(recursive)
+                                    .numeric(numeric).verifyAttributes(verifyAttributes).get());
+                    return visitor.isOK(missingFileCount.get());
+                }
+                case LIST:
+                {
+                    if (createArchiver() == false)
+                    {
+                        break;
+                    }
+                    if (verbose)
+                    {
+                        System.out.printf("Listing file '%s'\n", archiveFile);
+                    }
+                    final String fileOrDir = (arguments.size() > 2) ? arguments.get(2) : "/";
+                    final ListingVisitor visitor =
+                            new ListingVisitor(testAgainstChecksums, quiet, verbose, numeric,
+                                    suppressDirectoryEntries);
+                    archiver.list(fileOrDir, visitor, ListParameters.build().recursive(recursive)
+                            .readLinkTargets(verbose).testArchive(testAgainstChecksums).get());
+                    return visitor.isOK(0);
+                }
+                case HELP: // Can't happen any more at this point
+                    break;
+            }
+            return true;
+        } finally
+        {
+            if (archiver != null)
+            {
+                archiver.close();
+            }
+        }
+    }
+
+    public static void main(String[] args)
+    {
+        final HDF5ArchiverMain main = new HDF5ArchiverMain(args);
+        if (main.run() == false)
+        {
+            System.exit(1);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IArchiveEntryProcessor.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IArchiveEntryProcessor.java
new file mode 100644
index 0000000..48a6f65
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IArchiveEntryProcessor.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * A processor for an archive entry.
+ * 
+ * @author Bernd Rinn
+ */
+interface IArchiveEntryProcessor
+{
+    /**
+     * Performs any kind of processing of the given <var>link</var>.
+     * 
+     * @param dir The directory the current link is in.
+     * @param path The path of the current link (including the link name).
+     * @param link The link in the archive.
+     * @param reader The HDF5 reader.
+     * @param idCache The cached map of user and group ids to names.
+     * @param errorStrategy The strategy object for errors.
+     * @return <code>true</code> to continue processing this <var>link</var>, <code>false</code>
+     *         to skip over this entry (only relevant for directory links).
+     */
+    public boolean process(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException, HDF5Exception;
+
+    /**
+     * Performs any kind of post-processing of a directory. This is called after all files in the
+     * directory have been processed.
+     * 
+     * @param dir The directory the current link is in.
+     * @param path The path of the current link (including the link name).
+     * @param link The link in the archive.
+     * @param reader The HDF5 reader.
+     * @param idCache The cached map of user and group ids to names.
+     * @param errorStrategy The strategy object for errors.
+     */
+    public void postProcessDirectory(String dir, String path, LinkRecord link, IHDF5Reader reader,
+            IdCache idCache, IErrorStrategy errorStrategy) throws IOException, HDF5Exception;
+
+    /**
+     * Creates the appropriate exception class for this processor.
+     */
+    public ArchiverException createException(String objectPath, String detailedMsg);
+
+    /**
+     * Creates the appropriate exception class for this processor.
+     */
+    public ArchiverException createException(String objectPath, HDF5Exception cause);
+
+    /**
+     * Creates the appropriate exception class for this processor.
+     */
+    public ArchiverException createException(String objectPath, RuntimeException cause);
+
+    /**
+     * Creates the appropriate exception class for this processor.
+     */
+    public ArchiverException createException(File file, IOException cause);
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IArchiveEntryVisitor.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IArchiveEntryVisitor.java
new file mode 100644
index 0000000..1f4fa71
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IArchiveEntryVisitor.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+/**
+ * A role to visit {@link ArchiveEntry}s.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IArchiveEntryVisitor
+{
+    public final static IArchiveEntryVisitor DEFAULT_VISITOR = new IArchiveEntryVisitor()
+    {
+        @Override
+        public void visit(ArchiveEntry entry)
+        {
+            System.out.println(entry.describeLink());
+        }
+    };
+
+    public final static IArchiveEntryVisitor NONVERBOSE_VISITOR = new IArchiveEntryVisitor()
+    {
+        @Override
+        public void visit(ArchiveEntry entry)
+        {
+            System.out.println(entry.describeLink(false));
+        }
+    };
+
+    /**
+     * Called for each archive <var>entry</var> which is visited.
+     */
+    public void visit(ArchiveEntry entry);
+}
\ No newline at end of file
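
The anonymous missing-file visitor in HDF5ArchiverMain above follows the same
shape as the two built-in constants. A minimal sketch of a reusable visitor
that counts the entries it sees (a hypothetical helper, not part of this patch):

    import java.util.concurrent.atomic.AtomicInteger;

    import ch.systemsx.cisd.hdf5.h5ar.ArchiveEntry;
    import ch.systemsx.cisd.hdf5.h5ar.IArchiveEntryVisitor;

    public class CountingVisitor implements IArchiveEntryVisitor
    {
        private final AtomicInteger count = new AtomicInteger();

        @Override
        public void visit(ArchiveEntry entry)
        {
            // Thread-safe, in case the archiver ever calls back concurrently.
            count.incrementAndGet();
        }

        public int getCount()
        {
            return count.get();
        }
    }
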
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IDirectoryIndex.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IDirectoryIndex.java
new file mode 100644
index 0000000..e751460
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IDirectoryIndex.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.Closeable;
+import java.io.Flushable;
+import java.util.Collection;
+import java.util.Iterator;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+
+/**
+ * Memory representation of the directory index stored in an HDF5 archive.
+ * <p>
+ * Can operate in read-only or read-write mode. The mode is automatically determined by the
+ * <var>hdf5Reader</var> provided to the constructor: if this is an instance of {@link IHDF5Writer},
+ * the directory index will be read-write, otherwise read-only.
+ * 
+ * @author Bernd Rinn
+ */
+interface IDirectoryIndex extends Iterable<LinkRecord>, Closeable, Flushable
+{
+
+    /**
+     * Amend the index with link targets. If the link targets have already been read, this method
+     * is a no-op.
+     */
+    public void amendLinkTargets();
+
+    public boolean exists(String name);
+
+    public boolean isDirectory(String name);
+
+    /**
+     * Returns the link with {@link LinkRecord#getLinkName()} equal to <var>name</var>, or
+     * <code>null</code>, if there is no such link in the directory index.
+     */
+    public LinkRecord tryGetLink(String name);
+
+    /**
+     * Returns <code>true</code>, if the link targets have been read.
+     */
+    public boolean hasLinkTargets();
+
+    @Override
+    public Iterator<LinkRecord> iterator();
+
+    /**
+     * Writes the directory index to the archive represented by <var>hdf5Writer</var>.
+     * <p>
+     * Works on the list data structure.
+     */
+    @Override
+    public void flush();
+
+    /**
+     * Add <var>entries</var> to the index. Any link that already exists in the index will be
+     * replaced.
+     */
+    public void updateIndex(LinkRecord[] entries);
+
+    /**
+     * Add <var>entries</var> to the index. Any link that already exists in the index will be
+     * replaced.
+     */
+    public void updateIndex(Collection<LinkRecord> entries);
+
+    /**
+     * Add <var>entry</var> to the index. If it already exists in the index, it will be replaced.
+     */
+    public void updateIndex(LinkRecord entry);
+
+    /**
+     * Removes <var>linkName</var> from the index, if it is present.
+     * 
+     * @return <code>true</code>, if <var>linkName</var> was removed.
+     */
+    public boolean remove(String linkName);
+
+    public boolean addFlushable(Flushable flushable);
+    
+    public boolean removeFlushable(Flushable flushable);
+    
+    @Override
+    public void close() throws IOExceptionUnchecked;
+
+}
\ No newline at end of file
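
A sketch of the lookup/update contract: tryGetLink() returns null for absent
links and flush() persists the index, per the javadoc above. The interface is
package-private, so this hypothetical helper would live in the h5ar package:

    package ch.systemsx.cisd.hdf5.h5ar;

    class DirectoryIndexUtil
    {
        static void removeIfPresent(IDirectoryIndex index, String linkName)
        {
            if (index.tryGetLink(linkName) != null)
            {
                index.remove(linkName);
                // Write the modified directory index back to the archive.
                index.flush();
            }
        }
    }
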
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IDirectoryIndexProvider.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IDirectoryIndexProvider.java
new file mode 100644
index 0000000..2e0dc2b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IDirectoryIndexProvider.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.Closeable;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+
+/**
+ * A provider for {@link DirectoryIndex} objects.
+ * 
+ * @author Bernd Rinn
+ */
+interface IDirectoryIndexProvider extends Closeable
+{
+    public IDirectoryIndex get(String normalizedGroupPath, boolean withLinkTargets);
+
+    public IErrorStrategy getErrorStrategy();
+ 
+    @Override
+    public void close();
+}
\ No newline at end of file
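
Since IDirectoryIndex extends Iterable<LinkRecord>, a provider plus a for-each
loop is all a directory listing needs. A sketch under the same in-package
assumption as above (LinkRecord#getLinkName() is the accessor referenced by the
IDirectoryIndex javadoc):

    package ch.systemsx.cisd.hdf5.h5ar;

    class DirectoryListingUtil
    {
        static void printGroup(IDirectoryIndexProvider provider, String groupPath)
        {
            // Request link targets too, so symlinks could be rendered as well.
            final IDirectoryIndex index = provider.get(groupPath, true);
            for (LinkRecord link : index)
            {
                System.out.println(link.getLinkName());
            }
        }
    }
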
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5ArchiveInfoProvider.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5ArchiveInfoProvider.java
new file mode 100644
index 0000000..e4cf2e3
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5ArchiveInfoProvider.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.List;
+
+/**
+ * An info provider for HDF5 archives.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5ArchiveInfoProvider
+{
+    //
+    // Information about individual entries
+    //
+
+    /**
+     * Returns <code>true</code>, if an entry <var>path</var> exists in the archive.
+     * 
+     * @param path The path to obtain information for.
+     */
+    public boolean exists(String path);
+
+    /**
+     * Returns <code>true</code>, if a directory entry <var>path</var> exists in the archive.
+     * 
+     * @param path The path to obtain information for.
+     */
+    public boolean isDirectory(String path);
+
+    /**
+     * Returns <code>true</code>, if a regular file entry <var>path</var> exists in the archive.
+     * 
+     * @param path The path to obtain information for.
+     */
+    public boolean isRegularFile(String path);
+
+    /**
+     * Returns <code>true</code>, if a symbolic link entry <var>path</var> exists in the archive.
+     * 
+     * @param path The path to obtain information for.
+     */
+    public boolean isSymLink(String path);
+
+    /**
+     * Returns an archive entry for <var>path</var>, or <code>null</code>, if the archive has no
+     * archive entry for this <var>path</var>.
+     * 
+     * @param path The path to obtain information for.
+     * @param readLinkTarget If <code>true</code> and if the entry is a symbolic link entry, read
+     *            the link target.
+     */
+    public ArchiveEntry tryGetEntry(String path, boolean readLinkTarget);
+
+    /**
+     * Resolves the symbolic link of <var>entry</var>, if any.
+     * 
+     * @param entry The archive entry to resolve.
+     * @return The resolved link, if <var>entry</var> is a symbolic link that links to an existing
+     *         file or directory target, <code>null</code> if <var>entry</var> is a symbolic link
+     *         that links to a non-existing target, or <var>entry</var>, if this is not a link.
+     */
+    public ArchiveEntry tryResolveLink(ArchiveEntry entry);
+
+    /**
+     * Returns the archive entry for <var>path</var>. If <var>path</var> is a symbolic link, the
+     * entry will be resolved to the real file or directory in the archive, or <code>null</code>, if
+     * the link target doesn't exist.
+     * 
+     * @param path The path in the archive to get the entry for.
+     * @param keepPath If <code>true</code>, the resolved entry will keep the <var>path</var>, i.e.
+     *            the returned entry of a symlink will look like a hard link. If <code>false</code>,
+     *            the returned entry will be the entry of the resolved path.
+     * @return The resolved link, if <var>path</var> denotes a file, directory, or symbolic link
+     *         that links to an existing file or directory target, <code>null</code> if
+     *         <var>path</var> denotes a symbolic link that links to a non-existing target.
+     */
+    public ArchiveEntry tryGetResolvedEntry(String path, boolean keepPath);
+
+    //
+    // Listing
+    //
+
+    /**
+     * Returns the list of all entries in the archive recursively.
+     * 
+     * @return The list of archive entries.
+     */
+    public List<ArchiveEntry> list();
+
+    /**
+     * Returns the list of all entries below <var>fileOrDir</var> in the archive recursively.
+     * 
+     * @param fileOrDir The file to list or the directory to list the entries from recursively.
+     * @return The list of archive entries.
+     */
+    public List<ArchiveEntry> list(String fileOrDir);
+
+    /**
+     * Returns the list of entries below <var>fileOrDir</var> in the archive.
+     * 
+     * @param fileOrDir The file to list or the directory to list the entries from.
+     * @param params the parameters to modify the listing behavior.
+     * @return The list of archive entries.
+     */
+    public List<ArchiveEntry> list(String fileOrDir, ListParameters params);
+
+    /**
+     * Returns the list of all entries below <var>fileOrDir</var> in the archive recursively.
+     * 
+     * @param fileOrDir The file to list or the directory to list the entries from recursively.
+     * @param visitor The archive entry visitor to call for each entry.
+     * @return This archive info provider.
+     */
+    public IHDF5ArchiveInfoProvider list(String fileOrDir, IArchiveEntryVisitor visitor);
+
+    /**
+     * Returns the list of entries below <var>fileOrDir</var> in the archive.
+     * 
+     * @param fileOrDir The file to list or the directory to list the entries from.
+     * @param visitor The archive entry visitor to call for each entry.
+     * @param params the parameters to modify the listing behavior.
+     * @return This archive info provider.
+     */
+    public IHDF5ArchiveInfoProvider list(String fileOrDir, IArchiveEntryVisitor visitor,
+            ListParameters params);
+
+    /**
+     * Performs an integrity check of the archive.
+     * 
+     * @return All entries which failed the integrity check.
+     */
+    public List<ArchiveEntry> test();
+
+}
\ No newline at end of file
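
A sketch of the query methods, assuming an info provider obtained elsewhere
(for example from the archive reader defined below); the archive path
"/data/readme.txt" is purely illustrative:

    import java.util.List;

    import ch.systemsx.cisd.hdf5.h5ar.ArchiveEntry;
    import ch.systemsx.cisd.hdf5.h5ar.IHDF5ArchiveInfoProvider;

    public class ArchiveInfoExample
    {
        static void describe(IHDF5ArchiveInfoProvider info)
        {
            if (info.exists("/data/readme.txt"))
            {
                // Also read the link target, in case the entry is a symlink.
                final ArchiveEntry entry = info.tryGetEntry("/data/readme.txt", true);
                System.out.println(entry.describeLink());
            }
            // Recursive listing of all entries below a directory.
            final List<ArchiveEntry> entries = info.list("/data");
            System.out.println(entries.size() + " entries below /data");
        }
    }
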
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5ArchiveReader.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5ArchiveReader.java
new file mode 100644
index 0000000..a36d0e5
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5ArchiveReader.java
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.List;
+
+import ch.systemsx.cisd.base.io.IInputStream;
+
+/**
+ * An interface for an HDF5 archive reader.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5ArchiveReader extends IHDF5ArchiveInfoProvider
+{
+
+    /**
+     * Closes this object and the file referenced by this object. This object must not be used after
+     * being closed. Calling this method for a second time is a no-op.
+     */
+    public void close();
+
+    /**
+     * Returns <code>true</code> if this archive reader has already been closed.
+     */
+    public boolean isClosed();
+
+    //
+    // Verification
+    //
+
+    /**
+     * Verifies the content of the archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param rootDirectoryInArchive The root directory in the archive to start verification from. It will
+     *            be stripped from each entry before <var>rootDirectoryOnFS</var> is added.
+     * @param visitor The entry visitor to call for each entry. Call {@link ArchiveEntry#isOK()} to
+     *            check whether verification was successful.
+     * @param missingArchiveEntryVisitor The entry visitor to call for each file that exists on the
+     *            filesystem, but is missing in the archive.
+     * @param params The parameters to determine behavior of the verification process.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, IArchiveEntryVisitor visitor,
+            IArchiveEntryVisitor missingArchiveEntryVisitor, VerifyParameters params);
+
+    /**
+     * Verifies the content of the archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param rootDirectoryInArchive The root directory in the archive to start verification from. It will
+     *            be stripped from each entry before <var>rootDirectoryOnFS</var> is added.
+     * @param visitor The entry visitor to call for each entry. Call {@link ArchiveEntry#isOK()} to
+     *            check whether verification was successful.
+     * @param params The parameters to determine behavior of the verification process.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, IArchiveEntryVisitor visitor, VerifyParameters params);
+
+    /**
+     * Verifies the content of the complete archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param visitor The entry visitor to call for each entry. Call {@link ArchiveEntry#isOK()} to
+     *            check whether verification was successful.
+     * @param missingArchiveEntryVisitor The entry visitor to call for each file that exists on the
+     *            filesystem, but is missing in the archive.
+     * @param params The parameters to determine behavior of the verification process.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, IArchiveEntryVisitor missingArchiveEntryVisitor,
+            VerifyParameters params);
+
+    /**
+     * Verifies the content of the complete archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param visitor The entry visitor to call for each entry. Call {@link ArchiveEntry#isOK()} to
+     *            check whether verification was successful.
+     * @param params The parameters to determine behavior of the verification process.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, VerifyParameters params);
+
+    /**
+     * Verifies the content of the complete archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param visitor The entry visitor to call for each entry. Call {@link ArchiveEntry#isOK()} to
+     *            check whether verification was successful.
+     * @param missingArchiveEntryVisitor The entry visitor to call for each file that exists on the
+     *            filesystem, but is missing in the archive.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, IArchiveEntryVisitor missingArchiveEntryVisitor);
+
+    /**
+     * Verifies the content of the complete archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param visitor The entry visitor to call for each entry. Call {@link ArchiveEntry#isOK()} to
+     *            check whether verification was successful.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor);
+
+    /**
+     * Verifies the content of the archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param rootDirectoryInArchive The root directory in the archive to start verification from. It will
+     *            be stripped from each entry before <var>rootDirectoryOnFS</var> is added.
+     * @param params The parameters to determine behavior of the verification process.
+     * @return The list of archive entries which failed verification.
+     */
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, VerifyParameters params);
+
+    /**
+     * Verifies the content of the archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param params The parameters to determine behavior of the verification process.
+     * @return The list of archive entries which failed verification.
+     */
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            VerifyParameters params);
+
+    /**
+     * Verifies the content of the archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @return The list of archive entries which failed verification.
+     */
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS);
+
+    /**
+     * Verifies the content of the complete archive against the filesystem.
+     * 
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @return The list of archive entries which failed verification.
+     */
+    public List<ArchiveEntry> verifyAgainstFilesystem(File rootDirectoryOnFS);
+
+    /**
+     * Verifies the content of the archive against the filesystem.
+     * 
+     * @param fileOrDir The file or directory entry in the archive to verify. May be empty, in which
+     *            case all entries below <var>rootDirectoryInArchive</var> are verified.
+     * @param rootDirectoryOnFS The root directory on the file system that should be added to each
+     *            entry in the archive when comparing.
+     * @param rootDirectoryInArchive The root directory in the archive to start verification from. It will
+     *            be stripped from each entry before <var>rootDirectoryOnFS</var> is added.
+     */
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive);
+
+    //
+    // Extraction
+    //
+
+    /**
+     * Extract the content of a file in the archive to an {@link OutputStream}.
+     * 
+     * @param path The path of the file to extract the content of.
+     * @param out The output stream to extract the content to.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractFile(String path, OutputStream out);
+
+    /**
+     * Extract the content of a file in the archive to a byte array.
+     * 
+     * @param path The path of the file to extract the content of.
+     * @return The byte array representing the content of the file.
+     */
+    public byte[] extractFileAsByteArray(String path);
+
+    /**
+     * Extract the content of a file in the archive as an {@link IInputStream}.
+     * 
+     * @param path The path of the file to extract the content of.
+     * @return The input stream interface. If an error occurs and the
+     *         {@link ch.systemsx.cisd.base.exceptions.IErrorStrategy} of the archive reader does
+     *         not re-throw the exception, the return value will be <code>null</code>.
+     */
+    public IInputStream extractFileAsIInputStream(String path);
+
+    /**
+     * Extract the content of a file in the archive as an {@link InputStream}.
+     * 
+     * @param path The path of the file to extract the content of.
+     * @return The input stream. If an error occurs and the
+     *         {@link ch.systemsx.cisd.base.exceptions.IErrorStrategy} of the archive reader does
+     *         not re-throw the exception, the return value will be <code>null</code>.
+     */
+    public InputStream extractFileAsInputStream(String path);
+
+    /**
+     * Extracts the complete archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystem(File rootDirectory);
+
+    /**
+     * Extracts a path from the archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param path The path in the archive to extract. This path will be kept unchanged when
+     *            extracted.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystem(File rootDirectory, String path);
+
+    /**
+     * Extracts a path from the archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param path The path in the archive to extract. This path will be kept unchanged when
+     *            extracted.
+     * @param visitorOrNull The entry visitor to call for each entry. Call
+     *            {@link ArchiveEntry#isOK()} to check whether verification was successful. May be
+     *            <code>null</code>.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystem(File rootDirectory, String path,
+            IArchiveEntryVisitor visitorOrNull);
+
+    /**
+     * Extracts a path from the archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param path The path in the archive to extract. This path will be kept unchanged when
+     *            extracted.
+     * @param strategy The strategy to determine which files and directories to extract and which
+     *            ones to suppress.
+     * @param visitorOrNull The entry visitor to call for each entry. Call
+     *            {@link ArchiveEntry#isOK()} to check whether verification was successful. May be
+     *            <code>null</code>.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystem(File rootDirectory, String path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor visitorOrNull);
+
+    /**
+     * Extracts all paths below a given directory path from the archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param rootInArchive The root path in the archive to extract. This path will be stripped when
+     *            extracted.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootInArchive);
+
+    /**
+     * Extracts all paths below a given directory path from the archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param rootInArchive The root path in the archive to extract. This path will be stripped when
+     *            extracted.
+     * @param visitorOrNull The entry visitor to call for each entry. Call
+     *            {@link ArchiveEntry#isOK()} to check whether verification was successful. May be
+     *            <code>null</code>.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootInArchive, IArchiveEntryVisitor visitorOrNull);
+
+    /**
+     * Extracts all paths below a given directory path from the archive to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param rootInArchive The root path in the archive to extract. This path will be stripped when
+     *            extracted.
+     * @param strategy The strategy to determine which files and directories to extract and which
+     *            ones to suppress.
+     * @param visitorOrNull The entry visitor to call for each entry. Call
+     *            {@link ArchiveEntry#isOK()} to check whether verification was successful. May be
+     *            <code>null</code>.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootInArchive, ArchivingStrategy strategy, IArchiveEntryVisitor visitorOrNull);
+
+    /**
+     * Extracts a path from the archive below a given directory path to the file system.
+     * 
+     * @param rootDirectory The directory in the file system to use as root directory for the
+     *            extracted archive path.
+     * @param rootInArchive The root path in the archive to extract. This path will be stripped when
+     *            extracted.
+     * @param path The path in the archive to extract, relative to <var>rootInArchive</var>.
+     *            This path will be kept unchanged when extracted.
+     * @param strategy The strategy to determine which files and directories to extract and which
+     *            ones to suppress.
+     * @param visitorOrNull The entry visitor to call for each entry. Call
+     *            {@link ArchiveEntry#isOK()} to check whether verification was successful. May be
+     *            <code>null</code>.
+     * @return This archive reader.
+     */
+    public IHDF5ArchiveReader extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootInArchive, String path, ArchivingStrategy strategy,
+            IArchiveEntryVisitor visitorOrNull);
+}
\ No newline at end of file
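
A sketch of a typical extract-then-verify round trip, assuming the reader was
opened elsewhere (for instance via the archiver factory added by this patch
series, which is not shown in this excerpt); the target directory is
illustrative:

    import java.io.File;
    import java.util.List;

    import ch.systemsx.cisd.hdf5.h5ar.ArchiveEntry;
    import ch.systemsx.cisd.hdf5.h5ar.IHDF5ArchiveReader;

    public class ExtractAndVerifyExample
    {
        static void run(IHDF5ArchiveReader reader)
        {
            final File root = new File("/tmp/restore");
            // Extract the complete archive, then compare the result against it.
            reader.extractToFilesystem(root);
            final List<ArchiveEntry> failed = reader.verifyAgainstFilesystem("/", root);
            if (failed.isEmpty() == false)
            {
                System.err.println(failed.size() + " entries failed verification");
            }
            reader.close();
        }
    }
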
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5Archiver.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5Archiver.java
new file mode 100644
index 0000000..c4d2cd6
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IHDF5Archiver.java
@@ -0,0 +1,418 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.List;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.io.IOutputStream;
+import ch.systemsx.cisd.hdf5.h5ar.NewArchiveEntry.NewDirectoryArchiveEntry;
+import ch.systemsx.cisd.hdf5.h5ar.NewArchiveEntry.NewFileArchiveEntry;
+import ch.systemsx.cisd.hdf5.h5ar.NewArchiveEntry.NewSymLinkArchiveEntry;
+
+/**
+ * An interface for the HDF5 archiver.
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5Archiver extends IHDF5ArchiveReader
+{
+
+    /**
+     * Flush the underlying HDF5 writer.
+     */
+    public void flush() throws IOException;
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param path The file or directory to archive. Everything below this path is archived.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File path) throws IllegalStateException;
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param path The file or directory to archive. Everything below this path is archived.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File path, ArchivingStrategy strategy);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param path The file or directory to archive. Everything below this path is archived.
+     * @param visitor The {@link IArchiveEntryVisitor} to use. Can be <code>null</code>.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File path, IArchiveEntryVisitor visitor);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param path The file or directory to archive. Everything below this path is archived. The
+     *            name part of <var>path</var> may be kept, depending on the value of
+     *            <var>keepNameFromPath</var>.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     * @param keepNameFromPath If <code>true</code>, the name part of <var>path</var> is kept in the
+     *            archive. Otherwise, <var>path</var> will represent "/" in the archive.
+     * @param visitor The {@link IArchiveEntryVisitor} to use. Can be <code>null</code>.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File path, ArchivingStrategy strategy,
+            boolean keepNameFromPath, IArchiveEntryVisitor visitor);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param parentDirToStrip The parent directory of <var>path</var> on the filesystem which
+     *            should be stripped in the archive. It is an error, if <var>parentDirToStrip</var>
+     *            is not a parent directory of <var>path</var>. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and
+     *            <code>parentDirToStrip=/home/joe/work</code>, then <code>c</code> will end up in
+     *            the archive at the path <code>/a/b/c</code>.
+     * @param path The file or directory to archive.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File parentDirToStrip, File path);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param parentDirToStrip The parent directory of <var>path</var> on the filesystem which
+     *            should be stripped in the archive. It is an error, if <var>parentDirToStrip</var>
+     *            is not a parent directory of <var>path</var>. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and
+     *            <code>parentDirToStrip=/home/joe/work</code>, then <code>c</code> will end up in
+     *            the archive at the path <code>a/b</code>.
+     * @param path The file or directory to archive.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File parentDirToStrip, File path,
+            ArchivingStrategy strategy);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param parentDirToStrip The parent directory of <var>path</var> on the filesystem which
+     *            should be stripped in the archive. It is an error, if <var>parentDirToStrip</var>
+     *            is not a parent directory of <var>path</var>. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and
+     *            <code>parentDirToStrip=/home/joe/work</code>, then <code>c</code> will end up in
+     *            the archive at the path <code>/a/b/c</code>.
+     * @param path The file or directory to archive.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     * @param visitor The {@link IArchiveEntryVisitor} to use. Can be <code>null</code>.
+     */
+    public IHDF5Archiver archiveFromFilesystem(File parentDirToStrip, File path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor visitor);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and <code>rootInArchive=/t</code>, then
+     *            <code>c</code> will end up in the archive at the path <code>/t/c</code>. If
+     *            <var>rootInArchive</var> is the last part of the parent directory of
+     *            <var>path</var> on the filesystem, then its metadata will be taken from the
+     *            filesystem.
+     * @param path The file or directory to archive.
+     */
+    public IHDF5Archiver archiveFromFilesystem(String rootInArchive, File path);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and <code>rootInArchive=/t</code>, then
+     *            <code>c</code> will end up in the archive at the path <code>/t/c</code>. If
+     *            <var>rootInArchive</var> is the last part of the parent directory of
+     *            <var>path</var> on the filesystem, then its metadata will be taken from the
+     *            filesystem.
+     * @param path The file or directory to archive.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     */
+    public IHDF5Archiver archiveFromFilesystem(String rootInArchive, File path,
+            ArchivingStrategy strategy);
+
+    /**
+     * Archive the <var>path</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and <code>rootInArchive=/t</code>, then
+     *            <code>c</code> will end up in the archive at the path <code>/t/c</code>. If
+     *            <var>rootInArchive</var> is the last part of the parent directory of
+     *            <var>path</var> on the filesystem, then its metadata will be taken from the
+     *            filesystem.
+     * @param path The file or directory to archive.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     * @param visitor The {@link IArchiveEntryVisitor} to use. Can be <code>null</code>.
+     */
+    public IHDF5Archiver archiveFromFilesystem(String rootInArchive, File path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor visitor);
+
+    /**
+     * Archive the content below <var>directory</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>directory=/home/joe/work/a/b</code>, directory <code>b</code> has two files
+     *            <code>c</code> and <code>d</code>, and <code>rootInArchive=/t</code>, then the
+     *            archive will have <code>c</code> at path <code>/t/c</code> and <code>d</code> at
+     *            path <code>/t/d</code>.
+     * @param directory The directory to archive the content of. It is an error if this is not a
+     *            directory on the filesystem.
+     */
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory);
+
+    /**
+     * Archive the content below <var>directory</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and <code>rootInArchive=/t</code>, then
+     *            <code>c</code> will end up in the archive at the path <code>/t/c</code>. If
+     *            <var>rootInArchive</var> is the last part of the parent directory of
+     *            <var>path</var> on the filesystem, then its metadata will be taken from the
+     *            filesystem.
+     * @param directory The directory to archive the content of. It is an error if this is not an
+     *            existing directory.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     */
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory,
+            ArchivingStrategy strategy);
+
+    /**
+     * Archive the content below <var>directory</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and <code>rootInArchive=/t</code>, then
+     *            <code>c</code> will end up in the archive at the path <code>/t/c</code>. If
+     *            <var>rootInArchive</var> is the last part of the parent directory of
+     *            <var>path</var> on the filesystem, then its metadata will be taken from the
+     *            filesystem.
+     * @param directory The directory to archive the content of. It is an error if this is not an
+     *            existing directory.
+     * @param visitor The {@link IArchiveEntryVisitor} to use. Can be <code>null</code>.
+     */
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory,
+            IArchiveEntryVisitor visitor);
+
+    /**
+     * Archive the content below <var>directory</var> from the filesystem.
+     * 
+     * @param rootInArchive The root directory of <var>path</var> in the archive. Example: If
+     *            <code>path=/home/joe/work/a/b/c</code> and <code>rootInArchive=/t</code>, then
+     *            <code>c</code> will end up in the archive at the path <code>/t/c</code>. If
+     *            <var>rootInArchive</var> is the last part of the parent directory of
+     *            <var>path</var> on the filesystem, then its metadata will be taken from the
+     *            filesystem.
+     * @param directory The directory to archive the content of. It is an error if this is not an
+     *            existing directory.
+     * @param strategy The archiving strategy to use. This strategy object determines which files to
+     *            include and to exclude and which files to compress.
+     * @param visitor The {@link IArchiveEntryVisitor} to use. Can be <code>null</code>.
+     */
+    public IHDF5Archiver archiveFromFilesystemBelowDirectory(String rootInArchive, File directory,
+            ArchivingStrategy strategy, IArchiveEntryVisitor visitor);
+
+    /**
+     * Archive the <var>data</var> as file in the archive.
+     * 
+     * @param path The path to archive the data at.
+     * @param data The bytes to archive as file content.
+     */
+    public IHDF5Archiver archiveFile(String path, byte[] data);
+
+    /**
+     * Archive the <var>input</var> as file in the archive.
+     * 
+     * @param path The path to archive the data at.
+     * @param input The input stream to get the file content from.
+     */
+    public IHDF5Archiver archiveFile(String path, InputStream input);
+
+    /**
+     * Archive the <var>input</var> as file in the archive.
+     * 
+     * @param entry The archive entry (defining the path) to archive the data at.
+     * @param input The input stream to get the file content from.
+     */
+    public IHDF5Archiver archiveFile(NewFileArchiveEntry entry, InputStream input);
+
+    /**
+     * Archive the <var>data</var> as file in the archive.
+     * 
+     * @param entry The archive entry (defining the path) to archive the data at.
+     * @param data The bytes to archive as file content.
+     */
+    public IHDF5Archiver archiveFile(NewFileArchiveEntry entry, byte[] data);
+
+    /**
+     * Return an {@link IOutputStream} that can be used to write the content of a file into the
+     * archive.
+     * 
+     * @param entry The archive entry (defining the path) to archive the data at.
+     * @return The output stream that the file content is written to.
+     */
+    public IOutputStream archiveFileAsIOutputStream(NewFileArchiveEntry entry);
+
+    /**
+     * Return an {@link OutputStream} that can be used to write the content of a file into the
+     * archive.
+     * 
+     * @param entry The archive entry (defining the path) to archive the data at.
+     * @return The output stream that the file content is written to.
+     */
+    public OutputStream archiveFileAsOutputStream(NewFileArchiveEntry entry);
+
+    /**
+     * Add a new symbolic link to the archive.
+     * 
+     * @param path The path where the symbolic link resides.
+     * @param linkTarget The target where the symbolic link points to.
+     */
+    public IHDF5Archiver archiveSymlink(String path, String linkTarget);
+
+    /**
+     * Add a new symbolic link to the archive.
+     * 
+     * @param entry The archive entry describing the symbolic link.
+     */
+    public IHDF5Archiver archiveSymlink(NewSymLinkArchiveEntry entry);
+
+    /**
+     * Add a new directory to the archive.
+     * 
+     * @param path The path in the archive where the directory resides.
+     */
+    public IHDF5Archiver archiveDirectory(String path);
+
+    /**
+     * Add a new directory to the archive.
+     * 
+     * @param entry The archive entry describing the directory.
+     */
+    public IHDF5Archiver archiveDirectory(NewDirectoryArchiveEntry entry);
+
+    /**
+     * Deletes a <var>path</var> from the archive.
+     * 
+     * @param path The path to delete.
+     */
+    public IHDF5Archiver delete(String path);
+
+    /**
+     * Deletes a list of <var>paths</var> from the archive.
+     * 
+     * @param paths The paths to delete.
+     */
+    public IHDF5Archiver delete(List<String> paths);
+
+    /**
+     * Deletes a list of <var>paths</var> from the archive.
+     * 
+     * @param paths The paths to delete.
+     * @param entryVisitorOrNull The visitor for each archive entry which is actually deleted. If no
+     *            errors occur, the visitor will be called once for each path in the list of
+     *            <var>paths</var>.
+     */
+    public IHDF5Archiver delete(List<String> paths, IArchiveEntryVisitor entryVisitorOrNull);
+
+    // Methods overridden from IHDF5ArchiveReader; see there for javadoc.
+
+    @Override
+    public IHDF5Archiver list(String fileOrDir, IArchiveEntryVisitor visitor);
+
+    @Override
+    public IHDF5Archiver list(String fileOrDir, IArchiveEntryVisitor visitor, ListParameters params);
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, IArchiveEntryVisitor visitor,
+            IArchiveEntryVisitor missingArchiveEntryVisitor, VerifyParameters params);
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, IArchiveEntryVisitor visitor, VerifyParameters params);
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, IArchiveEntryVisitor missingArchiveEntryVisitor,
+            VerifyParameters params);
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, VerifyParameters params);
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor, IArchiveEntryVisitor missingArchiveEntryVisitor);
+
+    @Override
+    public IHDF5Archiver verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            IArchiveEntryVisitor visitor);
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive, VerifyParameters params);
+
+    @Override
+    public List<ArchiveEntry> verifyAgainstFilesystem(String fileOrDir, File rootDirectoryOnFS,
+            String rootDirectoryInArchive);
+
+    @Override
+    public IHDF5Archiver extractFile(String path, OutputStream out) throws IOExceptionUnchecked;
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory);
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory, String path)
+            throws IllegalStateException;
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory, String path,
+            IArchiveEntryVisitor visitor) throws IllegalStateException;
+
+    @Override
+    public IHDF5Archiver extractToFilesystem(File rootDirectory, String path,
+            ArchivingStrategy strategy, IArchiveEntryVisitor visitor) throws IllegalStateException;
+
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive);
+    
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive, IArchiveEntryVisitor visitorOrNull);
+
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory,
+            String rootPathInArchive, ArchivingStrategy strategy, IArchiveEntryVisitor visitorOrNull);
+
+    @Override
+    public IHDF5Archiver extractToFilesystemBelowDirectory(File rootDirectory, String rootPathInArchive,
+            String path, ArchivingStrategy strategy, IArchiveEntryVisitor visitorOrNull)
+            throws IllegalStateException;
+}
\ No newline at end of file
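
A sketch of the write-side API, using only the methods declared above (the
paths and file content are illustrative):

    import java.io.IOException;

    import ch.systemsx.cisd.hdf5.h5ar.IHDF5Archiver;

    public class ArchiveWriteExample
    {
        static void addEntries(IHDF5Archiver archiver) throws IOException
        {
            // Store a small file directly from a byte array.
            archiver.archiveFile("/notes/hello.txt", "Hello, archive!".getBytes());
            // Add a symbolic link next to it, pointing at the file.
            archiver.archiveSymlink("/notes/hello-link", "hello.txt");
            // Flush the underlying HDF5 writer.
            archiver.flush();
        }
    }
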
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/IdCache.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/IdCache.java
new file mode 100644
index 0000000..40e71ce
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/IdCache.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.unix.Unix.Stat;
+
+/**
+ * Cache for ID -> Name mapping.
+ * 
+ * @author Bernd Rinn
+ */
+final class IdCache
+{
+    /** Gid -> Group Name */
+    private final Map<Integer, String> gidMap = Collections
+            .synchronizedMap(new HashMap<Integer, String>());
+
+    /** Uid -> User Name */
+    private final Map<Integer, String> uidMap = Collections
+            .synchronizedMap(new HashMap<Integer, String>());
+
+    /**
+     * Returns the user name for the uid of the given <var>link</var>.
+     */
+    String getUser(LinkRecord link, boolean numeric)
+    {
+        return getUser(link.getUid(), numeric);
+    }
+
+    /**
+     * Returns the user name for the uid of the given <var>link</var>.
+     */
+    String getUser(Stat link, boolean numeric)
+    {
+        return getUser(link.getUid(), numeric);
+    }
+
+    String getUser(int uid, boolean numeric)
+    {
+        String userNameOrNull = uidMap.get(uid);
+        if (userNameOrNull == null)
+        {
+            userNameOrNull =
+                    (numeric == false && Unix.isOperational()) ? Unix.tryGetUserNameForUid(uid)
+                            : null;
+            if (userNameOrNull == null)
+            {
+                userNameOrNull = Integer.toString(uid);
+            }
+            uidMap.put(uid, userNameOrNull);
+        }
+        return userNameOrNull;
+    }
+
+    /**
+     * Returns the group name for the gid of the given <var>link</var>.
+     */
+    String getGroup(LinkRecord link, boolean numeric)
+    {
+        return getGroup(link.getGid(), numeric);
+    }
+
+    /**
+     * Returns the group name for the gid of the given <var>link</var>.
+     */
+    String getGroup(Stat link, boolean numeric)
+    {
+        return getGroup(link.getGid(), numeric);
+    }
+
+    /**
+     * Returns the name for the given <var>gid</var>.
+     */
+    String getGroup(int gid, boolean numeric)
+    {
+        String groupNameOrNull = gidMap.get(gid);
+        if (groupNameOrNull == null)
+        {
+            groupNameOrNull =
+                    (numeric == false && Unix.isOperational()) ? Unix.tryGetGroupNameForGid(gid)
+                            : null;
+            if (groupNameOrNull == null)
+            {
+                groupNameOrNull = Integer.toString(gid);
+            }
+            gidMap.put(gid, groupNameOrNull);
+        }
+        return groupNameOrNull;
+    }
+}
\ No newline at end of file
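
Editor's note: a minimal sketch (not part of the patch) of how IdCache is meant to
be used. It resolves an ID to a name at most once and serves repeats from its
synchronized maps; since the class is package-private, this fragment would live in
ch.systemsx.cisd.hdf5.h5ar. The ID value 1000 is hypothetical.

    final IdCache idCache = new IdCache();
    // First call resolves the name via Unix.tryGetUserNameForUid() (when the native
    // library is operational) and caches the result; later calls hit the map.
    final String userName = idCache.getUser(1000, false);
    // numeric == true skips name resolution and yields the decimal ID string.
    final String numericGroup = idCache.getGroup(1000, true);

Note that the maps key only on the ID, so whichever form (name or number) is cached
first for a given ID is what subsequent calls return.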
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/LinkRecord.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/LinkRecord.java
new file mode 100644
index 0000000..66fa04a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/LinkRecord.java
@@ -0,0 +1,493 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.unix.Unix.Stat;
+import ch.systemsx.cisd.hdf5.CompoundElement;
+import ch.systemsx.cisd.hdf5.CompoundType;
+import ch.systemsx.cisd.hdf5.HDF5LinkInformation;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * A class containing all information we need to have about a link either in the file system or in
+ * an HDF5 container.
+ * 
+ * @author Bernd Rinn
+ */
+@CompoundType(name = "Link", mapAllFields = false)
+final class LinkRecord implements Comparable<LinkRecord>
+{
+    @CompoundElement(memberName = "linkNameLength")
+    private int linkNameLength;
+
+    @CompoundElement(memberName = "linkType", typeName = "linkType")
+    private FileLinkType linkType;
+
+    @CompoundElement(memberName = "size")
+    private long size;
+
+    @CompoundElement(memberName = "lastModified")
+    private long lastModified;
+
+    @CompoundElement(memberName = "uid")
+    private int uid;
+
+    @CompoundElement(memberName = "gid")
+    private int gid;
+
+    @CompoundElement(memberName = "permissions")
+    private short permissions;
+
+    @CompoundElement(memberName = "checksum")
+    private int crc32;
+
+    private boolean hasCrc32Checksum = false;
+
+    private String linkName;
+
+    private String linkTargetOrNull;
+
+    private FileLinkType verifiedType;
+
+    private long verifiedSize = Utils.UNKNOWN;
+
+    private int verifiedCrc32 = 0;
+
+    private long verifiedLastModified = Utils.UNKNOWN;
+
+    /**
+     * Returns a {@link LinkRecord} object for the given <var>file</var>, or <code>null</code> if a
+     * system call fails and the given <var>errorStrategy</var> decides to continue on the error.
+     */
+    public static LinkRecord tryCreate(File file, IErrorStrategy errorStrategy)
+    {
+        try
+        {
+            return new LinkRecord(file);
+        } catch (IOExceptionUnchecked ex)
+        {
+            errorStrategy.dealWithError(new ArchivingException(file, ex.getCause()));
+            return null;
+        }
+    }
+
+    /**
+     * Returns the link target of <var>symbolicLink</var>, or <code>null</code>, if
+     * <var>symbolicLink</var> is not a symbolic link or the link target could not be read.
+     */
+    public static String tryReadLinkTarget(File symbolicLink)
+    {
+        if (Unix.isOperational())
+        {
+            return Unix.tryReadSymbolicLink(symbolicLink.getPath());
+        } else
+        {
+            return null;
+        }
+    }
+
+    /**
+     * Returns a link record for <var>normalizedPath</var> in the HDF5 archive represented by
+     * <var>hdf5Reader</var>, or <code>null</code>, if this path does not exist in the archive.
+     */
+    public static LinkRecord tryReadFromArchive(IHDF5Reader hdf5Reader, String normalizedPath)
+    {
+        final HDF5LinkInformation linfo = hdf5Reader.object().getLinkInformation(normalizedPath);
+        if (linfo.exists() == false)
+        {
+            return null;
+        }
+        final long size =
+                linfo.isDataSet() ? hdf5Reader.object().getSize(linfo.getPath()) : Utils.UNKNOWN;
+        return new LinkRecord(linfo, size);
+    }
+
+    /**
+     * Used by the HDF5 library during reading.
+     */
+    LinkRecord()
+    {
+    }
+
+    /**
+     * A link for a user-created {@link NewArchiveEntry}.
+     */
+    LinkRecord(NewArchiveEntry entry)
+    {
+        this(entry.getName(), entry.getLinkTarget(), entry.getLinkType(), Utils.UNKNOWN, entry
+                .getLastModified(), entry.getUid(), entry.getGid(), entry.getPermissions(),
+                Utils.UNKNOWN);
+    }
+
+    /**
+     * Creates a link record for a new directory entry.
+     */
+    LinkRecord(String hdf5DirectoryPath)
+    {
+        this(hdf5DirectoryPath, System.currentTimeMillis() / Utils.MILLIS_PER_SECOND, Utils
+                .getCurrentUid(), Utils.getCurrentGid(), (short) 0755);
+    }
+
+    /**
+     * Creates the root directory entry from the File of the HDF5 archive.
+     */
+    static LinkRecord getLinkRecordForArchiveRoot(File hdf5Archive)
+    {
+        if (Unix.isOperational())
+        {
+            final Stat stat = Unix.getFileInfo(hdf5Archive.getPath());
+            return new LinkRecord("", stat.getLastModified(), stat.getUid(), stat.getGid(),
+                    stat.getPermissions());
+        } else
+        {
+            return new LinkRecord("", hdf5Archive.lastModified() / Utils.MILLIS_PER_SECOND,
+                    Utils.getCurrentUid(), Utils.getCurrentGid(), Utils.UNKNOWN_S);
+        }
+    }
+
+    /**
+     * Creates the link record for a file in the file system.
+     */
+    static LinkRecord getLinkRecordForLink(File file)
+    {
+        if (Unix.isOperational())
+        {
+            final Stat stat = Unix.getLinkInfo(file.getPath());
+            return new LinkRecord(file.getName(), stat.tryGetSymbolicLink(), stat.getLinkType(),
+                    stat.getSize(), stat.getLastModified(), stat.getUid(), stat.getGid(),
+                    stat.getPermissions(), (short) 0);
+        } else
+        {
+            return new LinkRecord(file.getName(), null, file.isDirectory() ? FileLinkType.DIRECTORY
+                    : FileLinkType.REGULAR_FILE, file.length(), file.lastModified()
+                    / Utils.MILLIS_PER_SECOND, Utils.getCurrentUid(), Utils.getCurrentGid(),
+                    Utils.UNKNOWN_S, (short) 0);
+        }
+    }
+
+    /**
+     * Creates a directory entry.
+     */
+    LinkRecord(String hdf5DirectoryPath, long lastModified, int uid, int gid, short permissions)
+    {
+        this.linkName = hdf5DirectoryPath;
+        this.linkTargetOrNull = null;
+        this.linkType = FileLinkType.DIRECTORY;
+        this.lastModified = lastModified;
+        this.uid = uid;
+        this.gid = gid;
+        this.permissions = permissions;
+    }
+
+    /**
+     * Used by {@link DirectoryIndex}.
+     */
+    LinkRecord(HDF5LinkInformation info, long size)
+    {
+        this.linkName = info.getName();
+        this.linkTargetOrNull = info.tryGetSymbolicLinkTarget();
+        this.linkType = Utils.translateType(info.getType());
+        this.size = size;
+        this.lastModified = Utils.UNKNOWN;
+        this.uid = Utils.UNKNOWN;
+        this.gid = Utils.UNKNOWN;
+        this.permissions = Utils.UNKNOWN_S;
+    }
+
+    /**
+     * Returns a {@link LinkRecord} object for the given <var>link</var> {@link File}.
+     */
+    private LinkRecord(File file)
+    {
+        this.linkName = file.getName();
+        if (Unix.isOperational())
+        {
+            final Stat info = Unix.getLinkInfo(file.getPath(), false);
+            this.linkType = info.getLinkType();
+            this.size = (linkType == FileLinkType.REGULAR_FILE) ? info.getSize() : 0;
+            this.lastModified = info.getLastModified();
+            this.uid = info.getUid();
+            this.gid = info.getGid();
+            this.permissions = info.getPermissions();
+        } else
+        {
+            this.linkType =
+                    (file.isDirectory()) ? FileLinkType.DIRECTORY
+                            : (file.isFile() ? FileLinkType.REGULAR_FILE : FileLinkType.OTHER);
+            this.size = (linkType == FileLinkType.REGULAR_FILE) ? file.length() : 0;
+            this.lastModified = file.lastModified() / Utils.MILLIS_PER_SECOND;
+            this.uid = Utils.UNKNOWN;
+            this.gid = Utils.UNKNOWN;
+            this.permissions = Utils.UNKNOWN_S;
+        }
+        if (linkType == FileLinkType.SYMLINK)
+        {
+            this.linkTargetOrNull = tryReadLinkTarget(file);
+        }
+    }
+
+    LinkRecord(String linkName, String linkTargetOrNull, FileLinkType linkType, long size,
+            long lastModified, int uid, int gid, short permissions, int crc32)
+    {
+        this.linkName = linkName;
+        this.linkTargetOrNull = linkTargetOrNull;
+        this.linkType = linkType;
+        this.size = size;
+        this.lastModified = lastModified;
+        this.uid = uid;
+        this.gid = gid;
+        this.permissions = permissions;
+        this.crc32 = crc32;
+    }
+
+    /**
+     * Call this method after reading the link from the archive and before using it.
+     */
+    int initAfterReading(String concatenatedNames, int startPos, IHDF5Reader reader,
+            String groupPath, boolean readLinkTarget)
+    {
+        this.hasCrc32Checksum = true;
+        final int endPos = startPos + linkNameLength;
+        this.linkName = concatenatedNames.substring(startPos, endPos);
+        if (readLinkTarget && linkType == FileLinkType.SYMLINK)
+        {
+            this.linkTargetOrNull =
+                    reader.object().getLinkInformation(groupPath + "/" + linkName)
+                            .tryGetSymbolicLinkTarget();
+        }
+        return endPos;
+    }
+
+    /**
+     * Call this method to read additionally the link target of a symlink.
+     */
+    void addLinkTarget(IHDF5Reader reader, String groupPath)
+    {
+        if (linkType == FileLinkType.SYMLINK && linkTargetOrNull == null)
+        {
+            this.linkTargetOrNull =
+                    reader.object().getLinkInformation(groupPath + "/" + linkName)
+                            .tryGetSymbolicLinkTarget();
+        }
+    }
+
+    /**
+     * Call this method before writing the link to the archive.
+     */
+    void prepareForWriting(StringBuilder concatenatedNames)
+    {
+        this.linkNameLength = this.linkName.length();
+        concatenatedNames.append(linkName);
+    }
+
+    public String getLinkName()
+    {
+        return linkName;
+    }
+
+    public String tryGetLinkTarget()
+    {
+        return linkTargetOrNull;
+    }
+
+    public boolean isDirectory()
+    {
+        return linkType == FileLinkType.DIRECTORY;
+    }
+
+    public boolean isSymLink()
+    {
+        return linkType == FileLinkType.SYMLINK;
+    }
+
+    public boolean isRegularFile()
+    {
+        return linkType == FileLinkType.REGULAR_FILE;
+    }
+
+    public FileLinkType getLinkType()
+    {
+        return linkType;
+    }
+
+    public void setSize(long size)
+    {
+        this.size = size;
+    }
+
+    public long getSize()
+    {
+        return size;
+    }
+
+    public boolean hasLastModified()
+    {
+        return lastModified >= 0;
+    }
+
+    public long getLastModified()
+    {
+        return lastModified;
+    }
+
+    public boolean hasUnixPermissions()
+    {
+        return uid >= 0 && gid >= 0 && permissions >= 0;
+    }
+
+    public int getUid()
+    {
+        return uid;
+    }
+
+    public int getGid()
+    {
+        return gid;
+    }
+
+    public short getPermissions()
+    {
+        return permissions;
+    }
+
+    public ArchiveEntryCompleteness getCompleteness()
+    {
+        if (hasUnixPermissions())
+        {
+            return ArchiveEntryCompleteness.FULL;
+        } else if (hasLastModified())
+        {
+            return ArchiveEntryCompleteness.LAST_MODIFIED;
+        } else
+        {
+            return ArchiveEntryCompleteness.BASE;
+        }
+    }
+
+    public int getCrc32()
+    {
+        return crc32;
+    }
+
+    public void setCrc32(int crc32)
+    {
+        this.crc32 = crc32;
+        this.hasCrc32Checksum = true;
+    }
+
+    boolean hasCRC32Checksum()
+    {
+        return hasCrc32Checksum;
+    }
+
+    public FileLinkType getVerifiedType()
+    {
+        return verifiedType;
+    }
+
+    public void setVerifiedType(FileLinkType verifiedType)
+    {
+        this.verifiedType = verifiedType;
+    }
+
+    public int getVerifiedCrc32()
+    {
+        return verifiedCrc32;
+    }
+
+    public long getVerifiedSize()
+    {
+        return verifiedSize;
+    }
+
+    public long getVerifiedLastModified()
+    {
+        return verifiedLastModified;
+    }
+
+    public void setFileVerification(long size, int crc32, long lastModified)
+    {
+        this.verifiedSize = size;
+        this.verifiedCrc32 = crc32;
+        this.verifiedLastModified = lastModified;
+    }
+
+    public void resetVerification()
+    {
+        verifiedType = null;
+        verifiedSize = Utils.UNKNOWN;
+        verifiedCrc32 = 0;
+        verifiedLastModified = Utils.UNKNOWN;
+    }
+
+    //
+    // Comparable
+    //
+
+    @Override
+    public int compareTo(LinkRecord o)
+    {
+        // We put all directories before all files.
+        if (isDirectory() && o.isDirectory() == false)
+        {
+            return -1;
+        } else if (isDirectory() == false && o.isDirectory())
+        {
+            return 1;
+        } else
+        {
+            return getLinkName().compareTo(o.getLinkName());
+        }
+    }
+
+    //
+    // Object
+    //
+
+    @Override
+    public boolean equals(Object obj)
+    {
+        if (obj == null || obj instanceof LinkRecord == false)
+        {
+            return false;
+        }
+        final LinkRecord that = (LinkRecord) obj;
+        return this.linkName.equals(that.linkName);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        return linkName.hashCode();
+    }
+
+    @Override
+    public String toString()
+    {
+        return "LinkRecord [linkName=" + linkName + ", linkType=" + linkType + ", size=" + size
+                + ", lastModified=" + lastModified + ", uid=" + uid + ", gid=" + gid
+                + ", permissions=" + permissions + ", crc32=" + crc32 + ", linkTargetOrNull="
+                + linkTargetOrNull + "]";
+    }
+}
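
Editor's note: a usage sketch (not part of the patch) of LinkRecord's natural
ordering, which puts directories before files and otherwise sorts by link name.
LinkRecord is package-private, so this fragment belongs to
ch.systemsx.cisd.hdf5.h5ar; the names are hypothetical.

    // A directory entry via the convenience constructor (current uid/gid, mode 0755)...
    final LinkRecord dir = new LinkRecord("docs");
    // ...and a file entry taken from the file system (falls back to java.io.File
    // metadata when the Unix native library is not operational).
    final LinkRecord file = LinkRecord.getLinkRecordForLink(new java.io.File("README.txt"));
    assert dir.compareTo(file) < 0; // directories sort first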
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/LinkStore.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/LinkStore.java
new file mode 100644
index 0000000..712e3e7
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/LinkStore.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+
+/**
+ * A store for {@link LinkRecord}s.
+ * 
+ * @author Bernd Rinn
+ */
+final class LinkStore implements Iterable<LinkRecord>
+{
+    private Map<String, LinkRecord> linkMap;
+
+    private LinkRecord[] sortedArrayOrNull;
+    
+    private boolean linkMapPopulated = false;
+
+    /**
+     * Creates a new empty link store.
+     */
+    LinkStore()
+    {
+        this(new LinkRecord[0]);
+    }
+
+    /**
+     * Creates a new link store and populates it with <var>sortedEntries</var>.
+     * 
+     * @param sortedEntries The links to populate the store with initially. The links are expected to be
+     *            sorted.
+     */
+    LinkStore(LinkRecord[] sortedEntries)
+    {
+        this.sortedArrayOrNull = sortedEntries;
+    }
+
+    private Map<String, LinkRecord> getLinkMap()
+    {
+        if (linkMapPopulated == false && sortedArrayOrNull != null)
+        {
+            linkMap = new HashMap<String, LinkRecord>(sortedArrayOrNull.length);
+            // Build the map lazily.
+            for (LinkRecord entry : sortedArrayOrNull)
+            {
+                linkMap.put(entry.getLinkName(), entry);
+            }
+            linkMapPopulated = true;
+        }
+        return linkMap;
+    }
+
+    /**
+     * Returns an array of the links in this store, in the order defined by
+     * {@link LinkRecord#compareTo(LinkRecord)}.
+     */
+    public synchronized LinkRecord[] getLinkArray()
+    {
+        if (sortedArrayOrNull == null)
+        {
+            sortedArrayOrNull = getLinkMap().values().toArray(new LinkRecord[getLinkMap().size()]);
+            Arrays.sort(sortedArrayOrNull);
+        }
+        return sortedArrayOrNull;
+    }
+
+    public synchronized void amendLinkTargets(IHDF5Reader reader, String groupPath)
+    {
+        for (LinkRecord link : getLinkMap().values())
+        {
+            link.addLinkTarget(reader, groupPath);
+        }
+    }
+
+    /**
+     * Returns the link with {@link LinkRecord#getLinkName()} equal to <var>name</var>, or
+     * <code>null</code>, if there is no such link in the store.
+     */
+    public synchronized LinkRecord tryGetLink(String name)
+    {
+        return getLinkMap().get(name);
+    }
+
+    public boolean exists(String name)
+    {
+        return tryGetLink(name) != null;
+    }
+
+    /**
+     * Returns <code>true</code> if this store is empty.
+     */
+    public synchronized boolean isEmpty()
+    {
+        return getLinkMap().isEmpty();
+    }
+
+    //
+    // Iterable<Link>
+    //
+
+    /**
+     * Returns an iterator over all links in the store, in the order defined by
+     * {@link LinkRecord#compareTo(LinkRecord)}.
+     */
+    @Override
+    public synchronized Iterator<LinkRecord> iterator()
+    {
+        final LinkRecord[] list = getLinkArray();
+        for (LinkRecord link : list)
+        {
+            link.resetVerification();
+        }
+        return new ArrayList<LinkRecord>(Arrays.asList(list)).iterator();
+    }
+
+    /**
+     * Updates the given <var>entry</var> in the store.
+     */
+    public synchronized void update(LinkRecord entry)
+    {
+        getLinkMap().put(entry.getLinkName(), entry);
+        sortedArrayOrNull = null;
+    }
+
+    /**
+     * Updates the <var>entries</var> in the store.
+     */
+    public synchronized void update(LinkRecord[] entries)
+    {
+        for (LinkRecord entry : entries)
+        {
+            getLinkMap().put(entry.getLinkName(), entry);
+        }
+        if (entries.length > 0)
+        {
+            sortedArrayOrNull = null;
+        }
+    }
+
+    /**
+     * Updates the <var>entries</var> in the store.
+     */
+    public synchronized void update(Collection<LinkRecord> entries)
+    {
+        for (LinkRecord entry : entries)
+        {
+            getLinkMap().put(entry.getLinkName(), entry);
+        }
+        if (entries.size() > 0)
+        {
+            sortedArrayOrNull = null;
+        }
+    }
+
+    /**
+     * Removes <var>linkName</var> from the store.
+     * 
+     * @return <code>true</code>, if it was removed, <code>false</code>, if it couldn't be found.
+     */
+    public synchronized boolean remove(String linkName)
+    {
+        final boolean storeChanged = (getLinkMap().remove(linkName) != null);
+        if (storeChanged)
+        {
+            sortedArrayOrNull = null;
+        }
+        return storeChanged;
+    }
+
+}
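
Editor's note: a usage sketch (not part of the patch) for LinkStore. The store keeps
a lazily built name map for lookups and rebuilds its sorted array view only after an
update invalidated it. Same-package fragment; the entry values are hypothetical.

    final LinkStore store = new LinkStore();
    store.update(new LinkRecord("zebra.txt", null, FileLinkType.REGULAR_FILE, 42L,
            0L, -1, -1, (short) 0644, 0));
    store.update(new LinkRecord("alpha")); // a directory entry
    for (LinkRecord link : store)
    {
        // Prints "alpha" before "zebra.txt": directories first, then by name.
        System.out.println(link.getLinkName());
    }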
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ListArchiveException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ListArchiveException.java
new file mode 100644
index 0000000..36c3f6b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ListArchiveException.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+/**
+ * Exception thrown when listing a file / directory in an archive fails.
+ *
+ * @author Bernd Rinn
+ */
+public class ListArchiveException extends ArchiverException
+{
+
+    private static final long serialVersionUID = 1L;
+    
+    private static final String OPERATION_NAME = "listing";
+    
+    public ListArchiveException(String objectPath, String detailedMsg)
+    {
+        super(objectPath, OPERATION_NAME, detailedMsg);
+    }
+    
+    public ListArchiveException(String objectPath, HDF5Exception cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public ListArchiveException(String objectPath, RuntimeException cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public ListArchiveException(File file, IOException cause)
+    {
+        super(file, OPERATION_NAME, cause);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ListArchiveTooManySymbolicLinksException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ListArchiveTooManySymbolicLinksException.java
new file mode 100644
index 0000000..6ba68a6
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ListArchiveTooManySymbolicLinksException.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+/**
+ * This exception is thrown when HDF5 detects too many symbolic links in a path, which usually
+ * indicates a cycle (an "infinite loop") of symbolic links.
+ * 
+ * @author Bernd Rinn
+ */
+public class ListArchiveTooManySymbolicLinksException extends ListArchiveException
+{
+    private static final long serialVersionUID = 1L;
+
+    public ListArchiveTooManySymbolicLinksException(String objectPath, HDF5Exception cause)
+    {
+        super(objectPath, cause);
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/ListParameters.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/ListParameters.java
new file mode 100644
index 0000000..09861a8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/ListParameters.java
@@ -0,0 +1,362 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+/**
+ * A class that represents parameters for {@link HDF5Archiver#list(String, ListParameters)}.
+ * 
+ * @author Bernd Rinn
+ */
+public final class ListParameters
+{
+    private final boolean recursive;
+
+    private final boolean readLinkTargets;
+
+    private final boolean testArchive;
+
+    private final boolean suppressDirectoryEntries;
+
+    private final boolean includeTopLevelDirectoryEntry;
+
+    private final boolean resolveSymbolicLinks;
+
+    private final boolean followSymbolicLinks;
+
+    /**
+     * The default list parameters:
+     * <ul>
+     * <li>recursive</li>
+     * <li>does not test the integrity of the archive</li>
+     * <li>does not suppress directory entries</li>
+     * <li>excludes the top-level directory</li>
+     * <li>reads symbolic link targets</li>
+     * <li>does not resolve symbolic links</li>
+     * <li>does not follow symbolic links</li>
+     * </ul>
+     */
+    public static final ListParameters DEFAULT = new ListParameters(true, true, false, false,
+            false, false, false);
+
+    /**
+     * The list parameters for testing the archive integrity:
+     * <ul>
+     * <li>recursive</li>
+     * <li>tests the integrity of the archive</li>
+     * <li>does not suppress directory entries</li>
+     * <li>includes the top-level directory</li>
+     * <li>reads symbolic link targets</li>
+     * <li>does not resolve symbolic links</li>
+     * <li>does not follow symbolic links</li>
+     * </ul>
+     */
+    public static final ListParameters TEST = new ListParameters(true, true, true, false, true,
+            false, false);
+
+    /**
+     * A class for constructing a new list parameters object.
+     */
+    public static final class ListParametersBuilder
+    {
+        private boolean recursive = true;
+
+        private boolean readLinkTargets = true;
+
+        private boolean testArchive = false;
+
+        private boolean suppressDirectoryEntries = false;
+
+        private boolean includeTopLevelDirectoryEntry = false;
+
+        private boolean resolveSymbolicLinks = false;
+
+        private boolean followSymbolicLinks = false;
+
+        private ListParametersBuilder()
+        {
+        }
+
+        /**
+         * Perform a non-recursive listing, i.e. do not traverse sub-directories.
+         */
+        public ListParametersBuilder nonRecursive()
+        {
+            this.recursive = false;
+            return this;
+        }
+
+        /**
+         * If <var>recursive</var> is <code>true</code>, perform a recursive listing; if it is
+         * <code>false</code>, perform a non-recursive listing, i.e. do not traverse
+         * sub-directories.
+         */
+        public ListParametersBuilder recursive(@SuppressWarnings("hiding")
+        boolean recursive)
+        {
+            this.recursive = recursive;
+            return this;
+        }
+
+        /**
+         * Do not read the link target of symbolic links.
+         */
+        public ListParametersBuilder noReadLinkTarget()
+        {
+            this.readLinkTargets = false;
+            return this;
+        }
+
+        /**
+         * If <var>readLinkTargets</var> is <code>true</code>, read the link targets of symbolic
+         * links; if it is <code>false</code>, do not read the link targets.
+         */
+        public ListParametersBuilder readLinkTargets(@SuppressWarnings("hiding")
+        boolean readLinkTargets)
+        {
+            this.readLinkTargets = readLinkTargets;
+            return this;
+        }
+
+        /**
+         * Perform an integrity test of the archive, i.e. see whether the index and the content of
+         * the archive match with respect to types, sizes and checksums.
+         */
+        public ListParametersBuilder testArchive()
+        {
+            this.testArchive = true;
+            return this;
+        }
+
+        /**
+         * If <var>testArchive</var> is <code>true</code>, perform an integrity test of the archive,
+         * i.e. check whether the index and the content of the archive match with respect to types,
+         * sizes and checksums; if it is <code>false</code>, do not perform an integrity check.
+         */
+        public ListParametersBuilder testArchive(@SuppressWarnings("hiding")
+        boolean testArchive)
+        {
+            this.testArchive = testArchive;
+            return this;
+        }
+
+        /**
+         * Suppress directory entries from being listed. Only files and links will be listed.
+         */
+        public ListParametersBuilder suppressDirectoryEntries()
+        {
+            this.suppressDirectoryEntries = true;
+            return this;
+        }
+
+        /**
+         * If <var>suppressDirectoryEntries</var> is <code>true</code>, suppress directory entries
+         * from being listed so that only files and links appear; if it is <code>false</code>, list
+         * directories as well.
+         */
+        public ListParametersBuilder suppressDirectoryEntries(@SuppressWarnings("hiding")
+        boolean suppressDirectoryEntries)
+        {
+            this.suppressDirectoryEntries = suppressDirectoryEntries;
+            return this;
+        }
+
+        /**
+         * Includes the top-level (or starting) directory in the listing.
+         * <p>
+         * Note that the root directory "/" will never be listed, so this parameter is only
+         * effective when the top-level directory of the listing is <i>not</i> the root directory.
+         */
+        public ListParametersBuilder includeTopLevelDirectoryEntry()
+        {
+            this.includeTopLevelDirectoryEntry = true;
+            return this;
+        }
+
+        /**
+         * If <var>includeTopLevelDirectoryEntry</var> is <code>true</code>, includes the top-level
+         * directory in the listing; if it is <code>false</code>, excludes the top-level directory
+         * from the listing.
+         * <p>
+         * Note that the root directory "/" will never be listed, so this parameter is only
+         * effective when the top-level directory of the listing is <i>not</i> the root directory.
+         */
+        public ListParametersBuilder includeTopLevelDirectoryEntry(@SuppressWarnings("hiding")
+        boolean includeTopLevelDirectoryEntry)
+        {
+            this.includeTopLevelDirectoryEntry = includeTopLevelDirectoryEntry;
+            return this;
+        }
+
+        /**
+         * Resolve symbolic links to their link targets.
+         * <p>
+         * This makes symbolic links appear much like hard links in the listing. Note, however,
+         * that resolving a symbolic link to a directory does not by itself cause that directory
+         * to be traversed, except if {@link #followSymbolicLinks()} is also given.
+         */
+        public ListParametersBuilder resolveSymbolicLinks()
+        {
+            this.resolveSymbolicLinks = true;
+            return this;
+        }
+
+        /**
+         * If <var>resolveSymbolicLinks</var> is <code>true</code>, resolve symbolic links to their
+         * link targets; if it is <code>false</code>, do not resolve symbolic links to their link
+         * targets.
+         * <p>
+         * If set to <code>true</code>, this makes symbolic links appear much like hard links in
+         * the listing. Note, however, that resolving a symbolic link to a directory does not by
+         * itself cause that directory to be traversed, except if {@link #followSymbolicLinks()}
+         * is also given.
+         */
+        public ListParametersBuilder resolveSymbolicLinks(@SuppressWarnings("hiding")
+        boolean resolveSymbolicLinks)
+        {
+            this.resolveSymbolicLinks = resolveSymbolicLinks;
+            return this;
+        }
+
+        /**
+         * Traverse a directory that was resolved from a symbolic link.
+         * <p>
+         * Only effective if recursive listing is enabled.
+         */
+        public ListParametersBuilder followSymbolicLinks()
+        {
+            this.followSymbolicLinks = true;
+            return this;
+        }
+
+        /**
+         * If <var>followSymbolicLinks</var> is set to <code>true</code>, traverse directories that
+         * were resolved from symbolic links; if it is <code>false</code>, do not traverse a
+         * directory when it was resolved from a symbolic link.
+         * <p>
+         * Only effective if recursive listing is enabled.
+         */
+        public ListParametersBuilder followSymbolicLinks(@SuppressWarnings("hiding")
+        boolean followSymbolicLinks)
+        {
+            this.followSymbolicLinks = followSymbolicLinks;
+            return this;
+        }
+
+        /**
+         * Returns the {@link ListParameters} object constructed.
+         */
+        public ListParameters get()
+        {
+            return new ListParameters(recursive, readLinkTargets, testArchive,
+                    suppressDirectoryEntries, includeTopLevelDirectoryEntry, resolveSymbolicLinks,
+                    followSymbolicLinks);
+        }
+    }
+
+    /**
+     * Starts building new list parameters.
+     * 
+     * @return A new {@link ListParametersBuilder}.
+     */
+    public static ListParametersBuilder build()
+    {
+        return new ListParametersBuilder();
+    }
+
+    private ListParameters(boolean recursive, boolean readLinkTargets, boolean testArchive,
+            boolean suppressDirectoryEntries, boolean includeTopLevelDirectoryEntry,
+            boolean resolveSymbolicLinks, boolean followSymbolicLinks)
+    {
+        this.recursive = recursive;
+        this.readLinkTargets = readLinkTargets || resolveSymbolicLinks;
+        this.testArchive = testArchive;
+        this.suppressDirectoryEntries = suppressDirectoryEntries;
+        this.includeTopLevelDirectoryEntry = includeTopLevelDirectoryEntry;
+        this.resolveSymbolicLinks = resolveSymbolicLinks;
+        this.followSymbolicLinks = followSymbolicLinks;
+    }
+
+    /**
+     * Returns if recursive listing is enabled, i.e. if the listing will traverse into
+     * sub-directories.
+     * 
+     * @see ListParametersBuilder#recursive(boolean)
+     */
+    public boolean isRecursive()
+    {
+        return recursive;
+    }
+
+    /**
+     * Returns if symbolic link targets should be read.
+     * 
+     * @see ListParametersBuilder#readLinkTargets(boolean)
+     */
+    public boolean isReadLinkTargets()
+    {
+        return readLinkTargets;
+    }
+
+    /**
+     * Returns if the archive should be tested for integrity.
+     * 
+     * @see ListParametersBuilder#testArchive(boolean)
+     */
+    public boolean isTestArchive()
+    {
+        return testArchive;
+    }
+
+    /**
+     * Returns if directory entries should be suppressed from being listed.
+     * 
+     * @see ListParametersBuilder#suppressDirectoryEntries(boolean)
+     */
+    public boolean isSuppressDirectoryEntries()
+    {
+        return suppressDirectoryEntries;
+    }
+
+    /**
+     * Returns if the top-level directory entry should be listed as well.
+     * 
+     * @see ListParametersBuilder#includeTopLevelDirectoryEntry(boolean)
+     */
+    public boolean isIncludeTopLevelDirectoryEntry()
+    {
+        return includeTopLevelDirectoryEntry;
+    }
+
+    /**
+     * Returns if symbolic links should be resolved.
+     * 
+     * @see ListParametersBuilder#resolveSymbolicLinks(boolean)
+     */
+    public boolean isResolveSymbolicLinks()
+    {
+        return resolveSymbolicLinks;
+    }
+
+    /**
+     * Returns if directories resolved from symbolic links should be traversed.
+     * 
+     * @see ListParametersBuilder#followSymbolicLinks(boolean)
+     */
+    public boolean isFollowSymbolicLinks()
+    {
+        return followSymbolicLinks;
+    }
+}
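
Editor's note: a sketch (not part of the patch) of building a ListParameters object
with the builder above; the combination of options is arbitrary.

    final ListParameters params = ListParameters.build()
            .nonRecursive()
            .includeTopLevelDirectoryEntry()
            .testArchive()
            .get();
    assert params.isRecursive() == false && params.isTestArchive();

Note the constructor's coupling: readLinkTargets is forced to true whenever
resolveSymbolicLinks is set, since resolving a link requires its target.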
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/NewArchiveEntry.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/NewArchiveEntry.java
new file mode 100644
index 0000000..22bd693
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/NewArchiveEntry.java
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import ch.systemsx.cisd.base.unix.FileLinkType;
+
+/**
+ * A class to describe a new (yet to be created) archive entry.
+ * 
+ * @author Bernd Rinn
+ */
+public abstract class NewArchiveEntry
+{
+    private final String parentPath;
+
+    private final String name;
+
+    private final FileLinkType linkType;
+
+    private final String linkTarget;
+
+    private long lastModified;
+
+    private int uid;
+
+    private int gid;
+
+    private short permissions;
+
+    private long size;
+
+    private int crc32;
+
+    /**
+     * A class to describe a new regular file archive entry.
+     */
+    public static final class NewFileArchiveEntry extends NewArchiveEntry
+    {
+        private boolean compress;
+
+        private int chunkSize;
+
+        private NewFileArchiveEntry(String parentPath, String name)
+        {
+            super(parentPath, name, FileLinkType.REGULAR_FILE, null);
+        }
+
+        @Override
+        public NewFileArchiveEntry lastModified(long lastModified)
+        {
+            super.lastModified(lastModified);
+            return this;
+        }
+
+        @Override
+        public NewFileArchiveEntry uid(int uid)
+        {
+            super.uid(uid);
+            return this;
+        }
+
+        @Override
+        public NewFileArchiveEntry gid(int gid)
+        {
+            super.gid(gid);
+            return this;
+        }
+
+        @Override
+        public NewFileArchiveEntry permissions(short permissions)
+        {
+            super.permissions(permissions);
+            return this;
+        }
+
+        public NewFileArchiveEntry compress()
+        {
+            this.compress = true;
+            return this;
+        }
+
+        public NewFileArchiveEntry compress(@SuppressWarnings("hiding")
+        boolean compress)
+        {
+            this.compress = compress;
+            return this;
+        }
+
+        public boolean isCompress()
+        {
+            return compress;
+        }
+
+        /**
+         * @param chunkSize The chunk size of the file in the archive. Will be capped to 10MB.
+         */
+        public NewFileArchiveEntry chunkSize(@SuppressWarnings("hiding")
+        int chunkSize)
+        {
+            this.chunkSize = chunkSize;
+            return this;
+        }
+
+        public int getChunkSize()
+        {
+            return chunkSize;
+        }
+
+    }
+
+    /**
+     * A class to describe a new symlink archive entry.
+     */
+    public static final class NewSymLinkArchiveEntry extends NewArchiveEntry
+    {
+        private NewSymLinkArchiveEntry(String parentPath, String name, String linkTarget)
+        {
+            super(parentPath, name, FileLinkType.SYMLINK, linkTarget);
+        }
+
+        @Override
+        public NewSymLinkArchiveEntry lastModified(long lastModified)
+        {
+            super.lastModified(lastModified);
+            return this;
+        }
+
+        @Override
+        public NewSymLinkArchiveEntry uid(int uid)
+        {
+            super.uid(uid);
+            return this;
+        }
+
+        @Override
+        public NewSymLinkArchiveEntry gid(int gid)
+        {
+            super.gid(gid);
+            return this;
+        }
+
+        @Override
+        public NewSymLinkArchiveEntry permissions(short permissions)
+        {
+            super.permissions(permissions);
+            return this;
+        }
+    }
+
+    /**
+     * A class to describe a new directory archive entry.
+     */
+    public static final class NewDirectoryArchiveEntry extends NewArchiveEntry
+    {
+        private NewDirectoryArchiveEntry(String parentPath, String name)
+        {
+            super(parentPath, name, FileLinkType.DIRECTORY, null);
+        }
+
+        @Override
+        public NewDirectoryArchiveEntry lastModified(long lastModified)
+        {
+            super.lastModified(lastModified);
+            return this;
+        }
+
+        @Override
+        public NewDirectoryArchiveEntry uid(int uid)
+        {
+            super.uid(uid);
+            return this;
+        }
+
+        @Override
+        public NewDirectoryArchiveEntry gid(int gid)
+        {
+            super.gid(gid);
+            return this;
+        }
+
+        @Override
+        public NewDirectoryArchiveEntry permissions(short permissions)
+        {
+            super.permissions(permissions);
+            return this;
+        }
+    }
+
+    /**
+     * @param path The path of the file in the archive.
+     */
+    public static NewFileArchiveEntry file(String path)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        if (name.length() == 0)
+        {
+            throw new ArchivingException(path, "Path does not contain a name.");
+        }
+        return new NewFileArchiveEntry(parentPath, name);
+    }
+
+    /**
+     * @param parentPath The parent path of the file in the archive.
+     * @param name The name of the file in the archive.
+     */
+    public static NewFileArchiveEntry file(String parentPath, String name)
+    {
+        return new NewFileArchiveEntry(parentPath, name);
+    }
+
+    /**
+     * @param path The path of the symlink in the archive.
+     * @param linkTarget the link target of the symlink.
+     */
+    public static NewSymLinkArchiveEntry symlink(String path, String linkTarget)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        if (name.length() == 0)
+        {
+            throw new ArchivingException(path, "Path does not contain a name.");
+        }
+        return new NewSymLinkArchiveEntry(parentPath, name, linkTarget);
+    }
+
+    public static NewSymLinkArchiveEntry symlink(String parentPath, String name, String linkTarget)
+    {
+        return new NewSymLinkArchiveEntry(parentPath, name, linkTarget);
+    }
+
+    public static NewDirectoryArchiveEntry directory(String path)
+    {
+        final String normalizedPath = Utils.normalizePath(path);
+        final String parentPath = Utils.getParentPath(normalizedPath);
+        final String name = Utils.getName(normalizedPath);
+        if (name.length() == 0)
+        {
+            throw new ArchivingException(path, "Path does not contain a name.");
+        }
+        return new NewDirectoryArchiveEntry(parentPath, name);
+    }
+
+    public static NewDirectoryArchiveEntry directory(String parentPath, String name)
+    {
+        return new NewDirectoryArchiveEntry(parentPath, name);
+    }
+
+    private NewArchiveEntry(String parentPath, String name, FileLinkType linkType, String linkTarget)
+    {
+        this.parentPath = Utils.normalizePath(parentPath);
+        this.name = name;
+        this.linkType = linkType;
+        this.linkTarget = linkTarget;
+        this.size = Utils.UNKNOWN;
+        this.lastModified = System.currentTimeMillis() / Utils.MILLIS_PER_SECOND;
+        this.uid = Utils.getCurrentUid();
+        this.gid = Utils.getCurrentGid();
+        this.permissions = 0755;
+    }
+
+    public String getParentPath()
+    {
+        return parentPath;
+    }
+
+    public long getLastModified()
+    {
+        return lastModified;
+    }
+
+    public NewArchiveEntry lastModified(@SuppressWarnings("hiding")
+    long lastModified)
+    {
+        this.lastModified = lastModified;
+        return this;
+    }
+
+    public int getUid()
+    {
+        return uid;
+    }
+
+    public NewArchiveEntry uid(@SuppressWarnings("hiding")
+    int uid)
+    {
+        this.uid = uid;
+        return this;
+    }
+
+    public int getGid()
+    {
+        return gid;
+    }
+
+    public NewArchiveEntry gid(@SuppressWarnings("hiding")
+    int gid)
+    {
+        this.gid = gid;
+        return this;
+    }
+
+    public short getPermissions()
+    {
+        return permissions;
+    }
+
+    public NewArchiveEntry permissions(@SuppressWarnings("hiding")
+    short permissions)
+    {
+        this.permissions = permissions;
+        return this;
+    }
+
+    public int getCrc32()
+    {
+        return crc32;
+    }
+
+    void setCrc32(int crc32)
+    {
+        this.crc32 = crc32;
+    }
+
+    public String getName()
+    {
+        return name;
+    }
+
+    public FileLinkType getLinkType()
+    {
+        return linkType;
+    }
+
+    public String getLinkTarget()
+    {
+        return linkTarget;
+    }
+
+    public long getSize()
+    {
+        return size;
+    }
+
+    void setSize(long size)
+    {
+        this.size = size;
+    }
+
+}
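
Editor's note: a sketch (not part of the patch) of configuring a new entry with the
static factories and covariant fluent setters above. The path and attribute values
are hypothetical; such an entry would then be handed to one of the archiver's
archive methods.

    final NewArchiveEntry.NewFileArchiveEntry entry =
            NewArchiveEntry.file("/data/measurements.csv")
                    .compress()             // deflate the data set in the archive
                    .chunkSize(1024 * 1024) // 1 MiB chunks (capped to 10MB, see above)
                    .uid(1000).gid(1000)
                    .permissions((short) 0644);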
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/UnarchivingException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/UnarchivingException.java
new file mode 100644
index 0000000..be6eb8d
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/UnarchivingException.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+
+/**
+ * Exception thrown when unarchiving a file / directory fails.
+ *
+ * @author Bernd Rinn
+ */
+public class UnarchivingException extends ArchiverException
+{
+    private static final long serialVersionUID = 1L;
+    
+    private static final String OPERATION_NAME = "unarchiving";
+    
+    public UnarchivingException(String msg)
+    {
+        super("GENERAL", OPERATION_NAME, msg);
+    }
+    
+    public UnarchivingException(String objectPath, String detailedMsg)
+    {
+        super(objectPath, OPERATION_NAME, detailedMsg);
+    }
+    
+    public UnarchivingException(String objectPath, HDF5Exception cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public UnarchivingException(String objectPath, RuntimeException cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public UnarchivingException(File file, IOException cause)
+    {
+        super(file, OPERATION_NAME, cause);
+    }
+
+    public UnarchivingException(File file, IOExceptionUnchecked cause)
+    {
+        super(file, OPERATION_NAME, cause);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/Utils.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/Utils.java
new file mode 100644
index 0000000..17854b1
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/Utils.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+
+import org.apache.commons.io.FilenameUtils;
+
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.utilities.OSUtilities;
+import ch.systemsx.cisd.hdf5.HDF5ObjectType;
+
+/**
+ * Utility methods for h5ar.
+ * 
+ * @author Bernd Rinn
+ */
+final class Utils
+{
+    static final long MILLIS_PER_SECOND = 1000L;
+
+    final static int UNKNOWN = -1;
+
+    final static short UNKNOWN_S = -1;
+
+    private static final char[] HEX_CHARACTERS =
+        { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', };
+
+    private Utils()
+    {
+        // Not to be instantiated.
+    }
+
+    /**
+     * Converts a CRC32 checksum to a string representation.
+     */
+    static String crc32ToString(final int checksum)
+    {
+        final char buf[] = new char[8];
+        int w = checksum;
+        for (int i = 0, x = 7; i < 4; i++)
+        {
+            buf[x--] = HEX_CHARACTERS[w & 0xf];
+            buf[x--] = HEX_CHARACTERS[(w >>> 4) & 0xf];
+            w >>= 8;
+        }
+        return new String(buf);
+    }
+
+    /**
+     * Creates a string representation for the given permissions.
+     */
+    static String permissionsToString(int permissions, boolean directory, boolean numeric)
+    {
+        if (numeric)
+        {
+            return Integer.toString(permissions, 8);
+        } else
+        {
+            final short perms = (short) permissions;
+            final StringBuilder b = new StringBuilder();
+            b.append(directory ? 'd' : '-');
+            b.append((perms & Unix.S_IRUSR) != 0 ? 'r' : '-');
+            b.append((perms & Unix.S_IWUSR) != 0 ? 'w' : '-');
+            b.append((perms & Unix.S_IXUSR) != 0 ? ((perms & Unix.S_ISUID) != 0 ? 's' : 'x')
+                    : ((perms & Unix.S_ISUID) != 0 ? 'S' : '-'));
+            b.append((perms & Unix.S_IRGRP) != 0 ? 'r' : '-');
+            b.append((perms & Unix.S_IWGRP) != 0 ? 'w' : '-');
+            b.append((perms & Unix.S_IXGRP) != 0 ? ((perms & Unix.S_ISGID) != 0 ? 's' : 'x')
+                    : ((perms & Unix.S_ISGID) != 0 ? 'S' : '-'));
+            b.append((perms & Unix.S_IROTH) != 0 ? 'r' : '-');
+            b.append((perms & Unix.S_IWOTH) != 0 ? 'w' : '-');
+            b.append((perms & Unix.S_IXOTH) != 0 ? ((perms & Unix.S_ISVTX) != 0 ? 't' : 'x')
+                    : ((perms & Unix.S_ISVTX) != 0 ? 'T' : '-'));
+            return b.toString();
+        }
+    }
+
+    /**
+     * Returns the parent of <var>normalizedPath</var>, or "" if <var>normalizedPath</var> is the
+     * root "/".
+     */
+    static String getParentPath(String normalizedPath)
+    {
+        final int lastSlashIdx = normalizedPath.lastIndexOf('/');
+        if (lastSlashIdx <= 0)
+        {
+            return normalizedPath.length() <= 1 ? "" : "/";
+        } else
+        {
+            return normalizedPath.substring(0, lastSlashIdx);
+        }
+    }
+
+    /**
+     * Returns the name part of <var>path</var>.
+     */
+    static String getName(String path)
+    {
+        return path.substring(path.lastIndexOf('/') + 1);
+    }
+
+    private static String normalizeToUnix(String unixOrWindowsPath)
+    {
+        final String pathToNormalize =
+                OSUtilities.isWindows() ? unixOrWindowsPath.replace('/', '\\') : unixOrWindowsPath;
+        final String normalized = FilenameUtils.normalize(pathToNormalize);
+        return OSUtilities.isWindows() ? normalized.replace('\\', '/') : normalized;
+    }
+
+    /**
+     * Returns a normalized path: it starts with "/" and doesn't have "/" at the end, except if it
+     * is the root path "/". This method internally calls {@link FilenameUtils#normalize(String)}
+     * and thus removes any '.' and '..' elements.
+     */
+    static String normalizePath(String hdf5ObjectPath)
+    {
+        String prenormalizedPath = normalizeToUnix(hdf5ObjectPath);
+        if (prenormalizedPath == null)
+        {
+            prenormalizedPath = normalizeToUnix(hdf5ObjectPath.replace("//", "/"));
+            if (prenormalizedPath == null)
+            {
+                prenormalizedPath = hdf5ObjectPath.replace("//", "/");
+            }
+        }
+        final String pathStartingWithSlash =
+                (prenormalizedPath.startsWith("/") ? prenormalizedPath : "/" + prenormalizedPath);
+        return (pathStartingWithSlash.length() > 1 && pathStartingWithSlash.endsWith("/")) ? pathStartingWithSlash
+                .substring(0, pathStartingWithSlash.length() - 1) : pathStartingWithSlash;
+    }
+
+    /**
+     * Returns the absolute normalized {@link File} for <var>path</var>.
+     */
+    static File normalizePath(File path)
+    {
+        return new File(FilenameUtils.normalizeNoEndSeparator(path.getAbsolutePath()));
+    }
+
+    /**
+     * Concatenates <var>parentDirectory</var> and <var>name</var> into a new path and returns it.
+     */
+    static String concatLink(String parentDirectory, String name)
+    {
+        return parentDirectory.endsWith("/") ? parentDirectory + name : parentDirectory + "/"
+                + name;
+    }
+
+    /**
+     * Returns an {@link ArchiveEntry} from a {@link LinkRecord}. Can handle <code>null</code>
+     * {@link LinkRecord}s.
+     */
+    static ArchiveEntry tryToArchiveEntry(String dir, String path, LinkRecord linkOrNull,
+            IdCache idCache)
+    {
+        return linkOrNull != null ? new ArchiveEntry(dir, path, linkOrNull, idCache) : null;
+    }
+
+    /**
+     * Returns the UID of the current user or {@link Utils#UNKNOWN}, if that cannot be determined.
+     */
+    static int getCurrentUid()
+    {
+        if (Unix.isOperational())
+        {
+            return Unix.getUid();
+        } else
+        {
+            return Utils.UNKNOWN;
+        }
+    }
+
+    /**
+     * Returns the GID of the current user or {@link Utils#UNKNOWN}, if that cannot be determined.
+     */
+    static int getCurrentGid()
+    {
+        if (Unix.isOperational())
+        {
+            return Unix.getGid();
+        } else
+        {
+            return Utils.UNKNOWN;
+        }
+    }
+
+    static FileLinkType translateType(final HDF5ObjectType hdf5Type)
+    {
+        switch (hdf5Type)
+        {
+            case DATASET:
+                return FileLinkType.REGULAR_FILE;
+            case GROUP:
+                return FileLinkType.DIRECTORY;
+            case SOFT_LINK:
+                return FileLinkType.SYMLINK;
+            default:
+                return FileLinkType.OTHER;
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/VerifyArchiveException.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/VerifyArchiveException.java
new file mode 100644
index 0000000..985771f
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/VerifyArchiveException.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import java.io.File;
+import java.io.IOException;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+
+/**
+ * Exception thrown when verifying a file / directory in an archive against the file system.
+ *
+ * @author Bernd Rinn
+ */
+public class VerifyArchiveException extends ArchiverException
+{
+
+    private static final long serialVersionUID = 1L;
+    
+    private static final String OPERATION_NAME = "verifying";
+    
+    public VerifyArchiveException(String objectPath, String detailedMsg)
+    {
+        super(objectPath, OPERATION_NAME, detailedMsg);
+    }
+    
+    public VerifyArchiveException(String objectPath, HDF5Exception cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public VerifyArchiveException(String objectPath, RuntimeException cause)
+    {
+        super(objectPath, OPERATION_NAME, cause);
+    }
+    
+    public VerifyArchiveException(File file, IOException cause)
+    {
+        super(file, OPERATION_NAME, cause);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/VerifyParameters.java b/source/java/ch/systemsx/cisd/hdf5/h5ar/VerifyParameters.java
new file mode 100644
index 0000000..7b8048b
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/VerifyParameters.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+
+/**
+ * A class that represents parameters for
+ * {@link HDF5Archiver#verifyAgainstFilesystem(String, java.io.File, IArchiveEntryVisitor, VerifyParameters)}.
+ * 
+ * @author Bernd Rinn
+ */
+public final class VerifyParameters
+{
+    private final boolean recursive;
+
+    private final boolean numeric;
+
+    private final boolean verifyAttributes;
+
+    public static final VerifyParameters DEFAULT = new VerifyParameters(true, false, false);
+
+    /**
+     * A class for constructing a new verify parameters object.
+     */
+    public static final class VerifyParametersBuilder
+    {
+        private boolean recursive = true;
+
+        private boolean numeric = false;
+
+        private boolean verifyAttributes = false;
+
+        private VerifyParametersBuilder()
+        {
+        }
+
+        /**
+         * Performs a non-recursive verification, i.e. does not traverse sub-directories.
+         */
+        public VerifyParametersBuilder nonRecursive()
+        {
+            this.recursive = false;
+            return this;
+        }
+
+        /**
+         * If <var>recursive</var> is <code>true</code>, performs a recursive verification; if it
+         * is <code>false</code>, performs a non-recursive verification, i.e. does not traverse
+         * sub-directories.
+         */
+        public VerifyParametersBuilder recursive(@SuppressWarnings("hiding")
+        boolean recursive)
+        {
+            this.recursive = recursive;
+            return this;
+        }
+
+        /**
+         * Reports user ids and permissions as numerical values.
+         * <p>
+         * This is a pure display parameter that is only relevant if {@link #verifyAttributes()} has
+         * been set.
+         */
+        public VerifyParametersBuilder numeric()
+        {
+            this.numeric = true;
+            return this;
+        }
+
+        /**
+         * If <var>numeric</var> is <code>true</code>, reports user ids and permissions as
+         * numerical values; if it is <code>false</code>, reports user ids and permissions
+         * resolved to strings.
+         * <p>
+         * This is a pure display parameter that is only relevant if {@link #verifyAttributes()} has
+         * been set.
+         */
+        public VerifyParametersBuilder numeric(@SuppressWarnings("hiding")
+        boolean numeric)
+        {
+            this.numeric = numeric;
+            return this;
+        }
+
+        /**
+         * Also verifies last modification time, file ownership and access permissions.
+         */
+        public VerifyParametersBuilder verifyAttributes()
+        {
+            this.verifyAttributes = true;
+            return this;
+        }
+
+        /**
+         * If <var>verifyAttributes</var> is <code>true</code>, also verifies last modification
+         * time, file ownership and access permissions; if it is <code>false</code>, checks only
+         * the types and content of entries.
+         */
+        public VerifyParametersBuilder verifyAttributes(@SuppressWarnings("hiding")
+        boolean verifyAttributes)
+        {
+            this.verifyAttributes = verifyAttributes;
+            return this;
+        }
+
+        /**
+         * Returns the {@link VerifyParameters} object constructed.
+         */
+        public VerifyParameters get()
+        {
+            return new VerifyParameters(recursive, numeric, verifyAttributes);
+        }
+    }
+
+    /**
+     * Starts building new verify parameters.
+     * 
+     * @return A new {@link VerifyParametersBuilder}.
+     */
+    public static VerifyParametersBuilder build()
+    {
+        return new VerifyParametersBuilder();
+    }
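+    // Illustrative usage sketch (not part of the original sources):
+    //   final VerifyParameters params = VerifyParameters.build()
+    //           .nonRecursive()
+    //           .verifyAttributes()
+    //           .get();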
+
+    private VerifyParameters(boolean recursive, boolean numeric, boolean verifyAttributes)
+    {
+        this.recursive = recursive;
+        this.numeric = numeric;
+        this.verifyAttributes = verifyAttributes;
+    }
+
+    /**
+     * Returns whether recursive verification is enabled, i.e. whether the verify process will
+     * traverse into sub-directories.
+     * 
+     * @see VerifyParametersBuilder#recursive(boolean)
+     */
+    public boolean isRecursive()
+    {
+        return recursive;
+    }
+
+    /**
+     * Returns whether user id and permission failures should be reported numerically.
+     * 
+     * @see VerifyParametersBuilder#numeric(boolean)
+     */
+    public boolean isNumeric()
+    {
+        return numeric;
+    }
+
+    /**
+     * Returns whether file attributes (last modification time, file ownership and access
+     * permissions) are checked, too.
+     * 
+     * @see VerifyParametersBuilder#verifyAttributes(boolean)
+     */
+    public boolean isVerifyAttributes()
+    {
+        return verifyAttributes;
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/h5ar/package.html b/source/java/ch/systemsx/cisd/hdf5/h5ar/package.html
new file mode 100644
index 0000000..ad57a39
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/h5ar/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+     "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+  <head>
+    <title>Tools Package</title>
+  </head>
+  <body>
+    <p>
+    This package contains the JHDF5 <a href="IHDF5Archiver.html">archiver</a>. 
+    </p>
+  </body>
+</html> 
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5.java
new file mode 100644
index 0000000..f83effd
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5.java
@@ -0,0 +1,3934 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import ch.systemsx.cisd.base.utilities.NativeLibraryUtilities;
+
+/**
+ * The low-level C function wrappers. These functions are <i>not</i> thread-safe and need to be used
+ * through thread-safe wrappers.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ */
+class H5
+{
+    /** Expected major number of the library. */
+    private final static int expectedMajnum = 1;
+
+    /** Expected minor number of the library. */
+    private final static int expectedMinnum = 8;
+
+    /** Expected minimal release number of the library. */
+    private final static int expectedRelnum = 13;
+
+    static
+    {
+        if (NativeLibraryUtilities.loadNativeLibrary("jhdf5") == false)
+        {
+            throw new UnsupportedOperationException("No suitable HDF5 native library found for this platform.");
+        }
+
+        // Important! Exit quietly
+        try
+        {
+            synchronized (ncsa.hdf.hdf5lib.H5.class)
+            {
+                H5dont_atexit();
+            }
+        } catch (final HDF5LibraryException e)
+        {
+            System.exit(1);
+        }
+
+        // Important! Disable error output to C stdout
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            H5error_off();
+        }
+
+        // Ensure we have the expected version of the library (with at least the expected release
+        // number)
+        final int[] libversion = new int[3];
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            H5get_libversion(libversion);
+        }
+        if (libversion[0] != expectedMajnum || libversion[1] != expectedMinnum
+                || libversion[2] < expectedRelnum)
+        {
+            throw new UnsupportedOperationException("The HDF5 native library is outdated! It is version "
+                    + libversion[0] + "." + libversion[1] + "." + libversion[2]
+                    + ", but we require " + expectedMajnum + "." + expectedMinnum + ".x with x >= "
+                    + expectedRelnum + ".");
+        }
+    }
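+    // Sketch of what the version check above accepts (not part of the original
+    // sources): 1.8.13 and 1.8.14 pass; 1.8.12 fails (release number too old);
+    // 1.10.x fails (major and minor version must match exactly, only the release
+    // number may be newer).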
+
+    /** Call to ensure that the native library is loaded. */
+    public static void ensureNativeLibIsLoaded()
+    {
+    }
+
+    // ////////////////////////////////////////////////////////////////
+
+    /**
+     * J2C converts a Java constant to an HDF5 constant determined at runtime.
+     * 
+     * @param java_constant The value of Java constant
+     * @return the value of an HDF5 constant determined at runtime
+     */
+    public static native int J2C(int java_constant);
+
+    /**
+     * Turns off error handling. By default, the C library prints the error stack of the HDF-5 C
+     * library on stdout. This behavior may be disabled by calling H5error_off().
+     */
+    public static native int H5error_off();
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // Functions related to variable-length string copying       //
+    // //
+    // ////////////////////////////////////////////////////////////
+    
+    /**
+     * Returns the size of a pointer on this platform.
+     */
+    public static native int getPointerSize();
+    
+    /**
+     * Creates a C copy of str (using calloc) and puts a reference to it into buf at bufOfs.
+     */
+    public static native int compoundCpyVLStr(String str, byte[] buf, int bufOfs);
+    
+    /**
+     * Creates a Java String copy from the C char* pointer found in buf at bufOfs.
+     */
+    public static native String createVLStrFromCompound(byte[] buf, int bufOfs);
+    
+    /**
+     * Frees the variable-length strings in compound buf, where one compound has size recordSize and the 
+     * variable-length members can be found at byte-offsets vlIndices.
+     */
+    public static native int freeCompoundVLStr(byte[] buf, int recordSize, int[] vlIndices);
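+    // Illustrative lifecycle sketch (not part of the original sources; buf, ofs and
+    // recordSize are hypothetical: a compound record buffer, the byte offset of a
+    // variable-length string member, and the size of one record):
+    //   H5.compoundCpyVLStr("some value", buf, ofs);          // C-side copy into buf
+    //   final String back = H5.createVLStrFromCompound(buf, ofs);
+    //   H5.freeCompoundVLStr(buf, recordSize, new int[] { ofs });  // free the C memory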
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5: General Library Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5open initializes the library.
+     * 
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5open() throws HDF5LibraryException;
+
+    /**
+     * H5close flushes all data to disk, closes all file identifiers, and cleans up all memory used
+     * by the library.
+     * 
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5close() throws HDF5LibraryException;
+
+    /**
+     * H5dont_atexit indicates to the library that an atexit() cleanup routine should not be
+     * installed. In order to be effective, this routine must be called before any other HDF
+     * function calls, and must be called each time the library is loaded/linked into the
+     * application (the first time and after it's been un-loaded).
+     * <P>
+     * This is called by the static initializer, so this should never need to be explicitly called
+     * by a Java program.
+     * 
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    private static native int H5dont_atexit() throws HDF5LibraryException;
+
+    /**
+     * H5get_libversion retrieves the major, minor, and release numbers of the version of the HDF
+     * library which is linked to the application.
+     * 
+     * @param libversion The version information of the HDF library.
+     * 
+     *            <pre>
+     *            libversion[0] = The major version of the library.
+     *            libversion[1] = The minor version of the library.
+     *            libversion[2] = The release number of the library.
+     *            </pre>
+     * @return a non-negative value if successful, along with the version information.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5get_libversion(int[] libversion) throws HDF5LibraryException;
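+    // Usage sketch (not part of the original sources):
+    //   final int[] v = new int[3];
+    //   H5.H5get_libversion(v);   // e.g. v = {1, 8, 13}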
+
+    /**
+     * H5check_version verifies that the arguments match the version numbers compiled into the
+     * library.
+     * 
+     * @param majnum The major version of the library.
+     * @param minnum The minor version of the library.
+     * @param relnum The release number of the library.
+     * @return a non-negative value if successful. Upon failure (when the versions do not match),
+     *         this function causes the application to abort (i.e., crash). See the C API
+     *         function herr_t H5check_version().
+     */
+    public static native int H5check_version(int majnum, int minnum, int relnum);
+
+    /**
+     * H5garbage_collect collects on all free-lists of all types.
+     * <p>
+     * Note: this is new with HDF5.1.2.2. If using an earlier version, use 'configure
+     * --enable-hdf5_1_2_1' so this routine will fail safely.
+     * 
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5garbage_collect() throws HDF5LibraryException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5E: Error Stack //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Eclear clears the error stack for the current thread. H5Eclear can fail if there are
+     * problems initializing the library.
+     * <p>
+     * This may be used by exception handlers to assure that the error condition in the HDF-5
+     * library has been reset.
+     * 
+     * @return Returns a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Eclear() throws HDF5LibraryException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5A: Attribute Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Aexists returns <code>true</code> if an attribute with <var>name</var> exists for the
+     * object defined by <var>obj_id</var> and <code> false </code> otherwise.
+     */
+    public static native boolean H5Aexists(int obj_id, String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Acreate creates an attribute which is attached to the object specified with loc_id.
+     * 
+     * @param loc_id IN: Object (dataset, group, or named datatype) to be attached to.
+     * @param name IN: Name of attribute to create.
+     * @param type_id IN: Identifier of datatype for attribute.
+     * @param space_id IN: Identifier of dataspace for attribute.
+     * @param create_plist_id IN: Identifier of creation property list (currently not used).
+     * @param access_plist_id IN: Attribute access property list identifier (currently not used).
+     * @return an attribute identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Acreate(int loc_id, String name, int type_id, int space_id,
+            int create_plist_id, int access_plist_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Aopen_name opens an attribute specified by its name, name, which is attached to the object
+     * specified with loc_id.
+     * 
+     * @param loc_id IN: Identifier of a group, dataset, or named datatype attribute
+     * @param name IN: Attribute name.
+     * @return attribute identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Aopen_name(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Aopen_idx opens an attribute which is attached to the object specified with loc_id. The
+     * location object may be either a group, dataset, or named datatype, all of which may have any
+     * sort of attribute.
+     * 
+     * @param loc_id IN: Identifier of the group, dataset, or named datatype attribute
+     * @param idx IN: Index of the attribute to open.
+     * @return attribute identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Aopen_idx(int loc_id, int idx) throws HDF5LibraryException;
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static native int H5Awrite(int attr_id, int mem_type_id, byte[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static native int H5Awrite(int attr_id, int mem_type_id, short[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static native int H5Awrite(int attr_id, int mem_type_id, int[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static native int H5Awrite(int attr_id, int mem_type_id, long[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static native int H5Awrite(int attr_id, int mem_type_id, float[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static native int H5Awrite(int attr_id, int mem_type_id, double[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5AwriteString writes a (partial) variable length String attribute, specified by its
+     * identifier attr_id, from the application memory buffer buf into the file.
+     * 
+     * @param attr_id Identifier of the attribute to write to.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param buf Buffer with data to be written to the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5AwriteString(int attr_id, int mem_type_id, String[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Acopy copies the content of one attribute to another.
+     * 
+     * @param src_aid the identifier of the source attribute
+     * @param dst_aid the identifier of the destination attribute
+     */
+    public static native int H5Acopy(int src_aid, int dst_aid) throws HDF5LibraryException;
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Aread(int attr_id, int mem_type_id, byte[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Aread(int attr_id, int mem_type_id, short[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Aread(int attr_id, int mem_type_id, int[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Aread(int attr_id, int mem_type_id, long[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Aread(int attr_id, int mem_type_id, float[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Aread(int attr_id, int mem_type_id, double[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5AreadVL(int attr_id, int mem_type_id, String[] buf)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Aget_space retrieves a copy of the dataspace for an attribute.
+     * 
+     * @param attr_id IN: Identifier of an attribute.
+     * @return attribute dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Aget_space(int attr_id) throws HDF5LibraryException;
+
+    /**
+     * H5Aget_type retrieves a copy of the datatype for an attribute.
+     * 
+     * @param attr_id IN: Identifier of an attribute.
+     * @return a datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Aget_type(int attr_id) throws HDF5LibraryException;
+
+    /**
+     * H5Aget_name retrieves the name of an attribute specified by the identifier, attr_id.
+     * 
+     * @param attr_id IN: Identifier of the attribute.
+     * @param buf_size IN: The size of the buffer to store the name in.
+     * @param name OUT: Buffer to store name in.
+     * @exception ArrayIndexOutOfBoundsException JNI error writing back array
+     * @exception ArrayStoreException JNI error writing back array
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     * @exception IllegalArgumentException - buf_size <= 0.
+     * @return the length of the attribute's name if successful.
+     */
+    public static native long H5Aget_name(int attr_id, long buf_size, String[] name)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException, IllegalArgumentException;
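+    // Usage sketch (not part of the original sources; attrId is assumed to be a
+    // valid attribute identifier):
+    //   final String[] nameOut = new String[1];
+    //   final long len = H5.H5Aget_name(attrId, 256, nameOut);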
+
+    /**
+     * H5Aget_num_attrs returns the number of attributes attached to the object specified by its
+     * identifier, loc_id.
+     * 
+     * @param loc_id IN: Identifier of a group, dataset, or named datatype.
+     * @return the number of attributes if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Aget_num_attrs(int loc_id) throws HDF5LibraryException;
+
+    /**
+     * H5Adelete removes the attribute specified by its name, name, from a dataset, group, or named
+     * datatype.
+     * 
+     * @param loc_id IN: Identifier of the dataset, group, or named datatype.
+     * @param name IN: Name of the attribute to delete.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Adelete(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Aclose terminates access to the attribute specified by its identifier, attr_id.
+     * 
+     * @param attr_id IN: Attribute to release access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Aclose(int attr_id) throws HDF5LibraryException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5D: Datasets Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Dcreate creates a data set with a name, name, in the file or in the group specified by the
+     * identifier loc_id.
+     * 
+     * @param loc_id Identifier of the file or group to create the dataset within.
+     * @param name The name of the dataset to create.
+     * @param type_id Identifier of the datatype to use when creating the dataset.
+     * @param space_id Identifier of the dataspace to use when creating the dataset.
+     * @param link_create_plist_id Identifier of the link creation property list.
+     * @param dset_create_plist_id Identifier of the dataset creation property list.
+     * @param dset_access_plist_id Identifier of the dataset access property list.
+     * @return a dataset identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Dcreate(int loc_id, String name, int type_id, int space_id,
+            int link_create_plist_id, int dset_create_plist_id, int dset_access_plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Dopen opens an existing dataset for access in the file or group specified in loc_id.
+     * 
+     * @param loc_id Identifier of the file or group within which the dataset is to be opened.
+     * @param name The name of the dataset to access.
+     * @param access_plist_id Dataset access property list.
+     * @return a dataset identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Dopen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Dchdir_ext(String dir_name) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dgetdir_ext(String[] dir_name, int size)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Dget_space returns an identifier for a copy of the dataspace for a dataset.
+     * 
+     * @param dataset_id Identifier of the dataset to query.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Dget_space(int dataset_id) throws HDF5LibraryException;
+
+    /**
+     * H5Dget_type returns an identifier for a copy of the datatype for a dataset.
+     * 
+     * @param dataset_id Identifier of the dataset to query.
+     * @return a datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Dget_type(int dataset_id) throws HDF5LibraryException;
+
+    /**
+     * H5Dget_create_plist returns an identifier for a copy of the dataset creation property list
+     * for a dataset.
+     * 
+     * @param dataset_id Identifier of the dataset to query.
+     * @return a dataset creation property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Dget_create_plist(int dataset_id) throws HDF5LibraryException;
+
+    /**
+     * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into
+     * the application memory buffer buf.
+     * 
+     * @param dataset_id Identifier of the dataset read from.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param buf Buffer to store data read from the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static native int H5Dread(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, byte[] buf) throws HDF5LibraryException,
+            NullPointerException;
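+    // Usage sketch (not part of the original sources; the HDF5Constants class,
+    // datasetId and length are assumptions: constants holder, a byte dataset
+    // identifier, and the dataset's element count):
+    //   final byte[] data = new byte[length];
+    //   H5.H5Dread(datasetId, HDF5Constants.H5T_NATIVE_INT8, HDF5Constants.H5S_ALL,
+    //           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);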
+
+    public static native int H5DreadVL(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, Object[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5DwriteString writes a (partial) variable length String dataset, specified by its identifier
+     * dataset_id, from the application memory buffer buf into the file.
+     * <p>
+     * <i>contributed by Rosetta Biosoftware.</i>
+     * 
+     * @param dataset_id Identifier of the dataset to write to.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param buf Buffer with data to be written to the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5DwriteString(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, String[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the
+     * application memory buffer buf into the file.
+     * 
+     * @param dataset_id Identifier of the dataset to write to.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param buf Buffer with data to be written to the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, byte[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Dextend verifies that the dataset is at least of size size.
+     * 
+     * @param dataset_id Identifier of the dataset.
+     * @param size Array containing the new magnitude of each dimension.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size array is null.
+     */
+    public static native int H5Dextend(int dataset_id, byte[] size) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static int H5Dextend(final int dataset_id, final long[] size) throws HDF5Exception,
+            NullPointerException
+    {
+        final byte[] buf = HDFNativeData.longToByte(size);
+
+        return H5Dextend(dataset_id, buf);
+    }
+
+    /**
+     * H5Dset_extent sets the size of the dataset to <var>size</var>. Make sure that no important
+     * data are lost, since this method will not check that the data dimensions are not larger
+     * than <var>size</var>.
+     * 
+     * @param dataset_id Identifier of the dataset.
+     * @param size Array containing the new magnitude of each dimension.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size array is null.
+     */
+    public static native int H5Dset_extent(int dataset_id, byte[] size)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static int H5Dset_extent(final int dataset_id, final long[] size) throws HDF5Exception,
+            NullPointerException
+    {
+        final byte[] buf = HDFNativeData.longToByte(size);
+
+        return H5Dset_extent(dataset_id, buf);
+    }
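+    // Usage sketch (not part of the original sources): growing an extendable
+    // two-dimensional dataset (datasetId is hypothetical) to 100 x 20 elements:
+    //   H5.H5Dset_extent(datasetId, new long[] { 100, 20 });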
+
+    /**
+     * H5Dclose ends access to a dataset specified by dataset_id and releases resources used by it.
+     * 
+     * @param dataset_id Identifier of the dataset to finish access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Dclose(int dataset_id) throws HDF5LibraryException;
+
+    // The following static native functions are missing from HDF5 RM version 1.0.1
+
+    /**
+     * H5Dget_storage_size returns the amount of storage that is required for the dataset.
+     * 
+     * @param dataset_id Identifier of the dataset in question
+     * @return the amount of storage space allocated for the dataset.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Dget_storage_size(int dataset_id) throws HDF5LibraryException;
+
+    /**
+     * H5Dcopy copies the content of one dataset to another dataset.
+     * 
+     * @param src_did the identifier of the source dataset
+     * @param dst_did the identifier of the destination dataset
+     */
+    public static native int H5Dcopy(int src_did, int dst_did) throws HDF5LibraryException;
+
+    /**
+     * H5Dvlen_get_buf_size determines the number of bytes required to store the variable-length
+     * data of the dataset selection, returning the result in size.
+     */
+    public static native int H5Dvlen_get_buf_size(int dataset_id, int type_id, int space_id,
+            int[] size) throws HDF5LibraryException;
+
+    /**
+     * H5Dvlen_reclaim reclaims the buffers that the HDF5 library allocated when variable-length
+     * data was read.
+     * 
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static native int H5Dvlen_reclaim(int type_id, int space_id, int xfer_plist_id,
+            byte[] buf) throws HDF5LibraryException, NullPointerException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5F: File Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Fopen opens an existing file and is the primary function for accessing existing HDF5 files.
+     * 
+     * @param name Name of the file to access.
+     * @param flags File access flags.
+     * @param access_id Identifier for the file access properties list.
+     * @return a file identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Fopen(String name, int flags, int access_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Fcreate is the primary function for creating HDF5 files.
+     * 
+     * @param name Name of the file to access.
+     * @param flags File access flags. Possible values include:
+     *            <UL>
+     *            <LI>H5F_ACC_RDWR Allow read and write access to file.</LI>
+     *            <LI>H5F_ACC_RDONLY Allow read-only access to file.</LI>
+     *            <LI>H5F_ACC_TRUNC Truncate file, if it already exists, erasing all data previously
+     *            stored in the file.</LI>
+     *            <LI>H5F_ACC_EXCL Fail if file already exists.</LI>
+     *            <LI>H5F_ACC_DEBUG Print debug information.</LI>
+     *            <LI>H5P_DEFAULT Apply default file access and creation properties.</LI>
+     *            </UL>
+     * @param create_id File creation property list identifier, used when modifying default file
+     *            meta-data. Use H5P_DEFAULT for default access properties.
+     * @param access_id File access property list identifier. If parallel file access is desired,
+     *            this is a collective call according to the communicator stored in the access_id
+     *            (not supported in Java). Use H5P_DEFAULT for default access properties.
+     * @return a file identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Fcreate(String name, int flags, int create_id, int access_id)
+            throws HDF5LibraryException, NullPointerException;
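+    // Usage sketch (not part of the original sources; constants assumed to come from
+    // an HDF5Constants class):
+    //   final int fileId = H5.H5Fcreate("example.h5", HDF5Constants.H5F_ACC_TRUNC,
+    //           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    //   // ... create groups and datasets ...
+    //   H5.H5Fclose(fileId);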
+
+    /**
+     * H5Fflush causes all buffers associated with a file or object to be immediately flushed
+     * (written) to disk without removing the data from the (memory) cache.
+     * <P>
+     * After this call completes, the file (or object) is in a consistent state and all data written
+     * to date is assured to be permanent.
+     * 
+     * @param object_id Identifier of object used to identify the file. <b>object_id</b> can be any
+     *            object associated with the file, including the file itself, a dataset, a group, an
+     *            attribute, or a named data type.
+     * @param scope specifies the scope of the flushing action, in the case that the HDF-5 file is
+     *            not a single physical file.
+     *            <P>
+     *            Valid values are:
+     *            <UL>
+     *            <LI>H5F_SCOPE_GLOBAL Flushes the entire virtual file.</LI>
+     *            <LI>H5F_SCOPE_LOCAL Flushes only the specified file.</LI>
+     *            </UL>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Fflush(int object_id, int scope) throws HDF5LibraryException;
+
+    /**
+     * H5Fis_hdf5 determines whether a file is in the HDF5 format.
+     * 
+     * @param name File name to check format.
+     * @return true if is HDF-5, false if not.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native boolean H5Fis_hdf5(String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Fget_create_plist returns a file creation property list identifier identifying the creation
+     * properties used to create this file.
+     * 
+     * @param file_id Identifier of the file to get creation property list
+     * @return a file creation property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Fget_create_plist(int file_id) throws HDF5LibraryException;
+
+    /**
+     * H5Fget_access_plist returns the file access property list identifier of the specified file.
+     * 
+     * @param file_id Identifier of file to get access property list of
+     * @return a file access property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Fget_access_plist(int file_id) throws HDF5LibraryException;
+
+    /**
+     * H5Fclose terminates access to an HDF5 file.
+     * 
+     * @param file_id Identifier of a file to terminate access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Fclose(int file_id) throws HDF5LibraryException;
+
+    /**
+     * H5Fmount mounts the file specified by child_id onto the group specified by loc_id and name
+     * using the mount properties plist_id.
+     * 
+     * @param loc_id The identifier for the group onto which the file specified by child_id is to be
+     *            mounted.
+     * @param name The name of the group onto which the file specified by child_id is to be mounted.
+     * @param child_id The identifier of the file to be mounted.
+     * @param plist_id The identifier of the property list to be used.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Fmount(int loc_id, String name, int child_id, int plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * Given a mount point, H5Funmount disassociates the mount point's file from the file mounted
+     * there.
+     * 
+     * @param loc_id The identifier for the location at which the specified file is to be unmounted.
+     * @param name The name of the file to be unmounted.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Funmount(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Freopen reopens an HDF5 file.
+     * 
+     * @param file_id Identifier of a file to terminate and reopen access to.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @return a new file identifier if successful
+     */
+    public static native int H5Freopen(int file_id) throws HDF5LibraryException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5G: Group Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Gcreate creates a new group with the specified name at the specified location, loc_id.
+     * 
+     * @param loc_id The file or group identifier.
+     * @param name The absolute or relative name of the new group.
+     * @param link_create_plist_id Property list for link creation.
+     * @param group_create_plist_id Property list for group creation.
+     * @param group_access_plist_id Property list for group access.
+     * @return a valid group identifier for the open group if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Gcreate(int loc_id, String name, int link_create_plist_id,
+            int group_create_plist_id, int group_access_plist_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Gopen opens an existing group with the specified name at the specified location, loc_id.
+     * 
+     * @param loc_id File or group identifier within which the group is to be opened.
+     * @param name Name of group to open.
+     * @param access_plist_id Group access property list identifier (H5P_DEFAULT for the default
+     *            property list).
+     * @return a valid group identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Gopen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Gclose releases resources used by a group which was opened by a call to H5Gcreate() or
+     * H5Gopen().
+     * 
+     * @param group_id Group identifier to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Gclose(int group_id) throws HDF5LibraryException;
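+    // Usage sketch (not part of the original sources; fileId and the HDF5Constants
+    // class are assumptions):
+    //   final int groupId = H5.H5Gcreate(fileId, "/myGroup", HDF5Constants.H5P_DEFAULT,
+    //           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    //   H5.H5Gclose(groupId);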
+
+    /**
+     * H5Glink creates a new name for an already existing object.
+     * 
+     * @param loc_id File, group, dataset, or datatype identifier.
+     * @param link_type Link type. Possible values are:
+     *            <UL>
+     *            <LI>H5G_LINK_HARD</LI>
+     *            <LI>H5G_LINK_SOFT.</LI>
+     *            </UL>
+     * @param current_name The name of the existing object if the link is a hard link; can be
+     *            anything for a soft link.
+     * @param new_name New name for the object.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - current_name or name is null.
+     */
+    @Deprecated
+    public static native int H5Glink(int loc_id, int link_type, String current_name, String new_name)
+            throws HDF5LibraryException, NullPointerException;
+
+    @Deprecated
+    public static native int H5Glink2(int curr_loc_id, String current_name, int link_type,
+            int new_loc_id, String new_name) throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Gunlink removes an association between a name and an object.
+     * 
+     * @param loc_id Identifier of the file containing the object.
+     * @param name Name of the object to unlink.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Gunlink(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Gmove renames an object within an HDF5 file. The original name, src, is unlinked from the
+     * group graph and the new name, dst, is inserted as an atomic operation. Both names are
+     * interpreted relative to loc_id, which is either a file or a group identifier.
+     * 
+     * @param loc_id File or group identifier.
+     * @param src Object's original name.
+     * @param dst Object's new name.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - src or dst is null.
+     */
+    public static native int H5Gmove(int loc_id, String src, String dst)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Gget_linkval returns size characters of the link value through the value argument if loc_id
+     * (a file or group identifier) and name specify a symbolic link.
+     * 
+     * @param loc_id IN: Identifier of the file, group, dataset, or datatype.
+     * @param name IN: Name of the object whose link value is to be checked.
+     * @param size IN: Maximum number of characters of value to be returned.
+     * @param value OUT: Link value.
+     * @return a non-negative value, with the link value in value, if successful.
+     * @exception ArrayIndexOutOfBoundsException Copy back failed
+     * @exception ArrayStoreException Copy back failed
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     * @exception IllegalArgumentException - size is invalid
+     */
+    public static native int H5Gget_linkval(int loc_id, String name, int size, String[] value)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Gset_comment sets the comment for the object name to comment. Any previously existing
+     * comment is overwritten.
+     * 
+     * @param loc_id IN: Identifier of the file, group, dataset, or datatype.
+     * @param name IN: Name of the object whose comment is to be set or reset.
+     * @param comment IN: The new comment.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name or comment is null.
+     */
+    public static native int H5Gset_comment(int loc_id, String name, String comment)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Gget_comment retrieves the comment for the object name. The comment is returned in the
+     * buffer comment.
+     * 
+     * @param loc_id IN: Identifier of the file, group, dataset, or datatype.
+     * @param name IN: Name of the object whose comment is to be retrieved.
+     * @param bufsize IN: Anticipated size of the buffer required to hold comment.
+     * @param comment OUT: The comment.
+     * @return the number of characters in the comment, counting the null terminator, if successful
+     * @exception ArrayIndexOutOfBoundsException - JNI error writing back data
+     * @exception ArrayStoreException - JNI error writing back data
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     * @exception IllegalArgumentException - size < 1, comment is invalid.
+     */
+    public static native int H5Gget_comment(int loc_id, String name, int bufsize, String[] comment)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException, IllegalArgumentException;
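+
+    // Usage sketch (editor's illustration): set a comment on an object and read it back
+    // through the one-element String array required by the OUT parameter. file_id and
+    // the buffer size are hypothetical.
+    //
+    //   H5Gset_comment(file_id, "/data", "calibrated input");
+    //   final String[] comment = new String[1];
+    //   H5Gget_comment(file_id, "/data", 256, comment);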
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5I: Identifier Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Iget_type retrieves the type of the object identified by obj_id.
+     * 
+     * @param obj_id IN: Object identifier whose type is to be determined.
+     * @return the object type if successful; otherwise H5I_BADID.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Iget_type(int obj_id) throws HDF5LibraryException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5P: Property List Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Pcreate creates a new property list as an instance of some property list class.
+     * 
+     * @param type IN: The type of property list to create.
+     * @return a property list identifier (plist) if successful; otherwise Fail (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pcreate(int type) throws HDF5LibraryException;
+
+    /**
+     * H5Pclose terminates access to a property list.
+     * 
+     * @param plist IN: Identifier of the property list to terminate access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pclose(int plist) throws HDF5LibraryException;
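+
+    // Usage sketch (editor's illustration): the typical property list life cycle. The
+    // class constant comes from HDF5Constants; error handling is omitted.
+    //
+    //   final int plist = H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+    //   try
+    //   {
+    //       // ... set properties on plist ...
+    //   } finally
+    //   {
+    //       H5Pclose(plist);
+    //   }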
+
+    /**
+     * H5Pget_class returns the property list class for the property list identified by the plist
+     * parameter.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @return a property list class if successful. Otherwise returns H5P_NO_CLASS (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pget_class(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pcopy copies an existing property list to create a new property list.
+     * 
+     * @param plist IN: Identifier of property list to duplicate.
+     * @return a property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pcopy(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_version retrieves the version information of various objects for a file creation
+     * property list.
+     * 
+     * @param plist IN: Identifier of the file creation property list.
+     * @param version_info OUT: version information.
+     * 
+     *            <pre>
+     *            version_info[0] = boot     // boot block version number
+     *            version_info[1] = freelist // global freelist version
+     *            version_info[2] = stab     // symbol table version number
+     *            version_info[3] = shhdr    // shared object header version
+     *            </pre>
+     * @return a non-negative value, with the values of version_info initialized, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - version_info is null.
+     * @exception IllegalArgumentException - version_info is illegal.
+     */
+    public static native int H5Pget_version(int plist, int[] version_info)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Pset_userblock sets the user block size of a file creation property list.
+     * 
+     * @param plist IN: Identifier of property list to modify.
+     * @param size IN: Size of the user-block in bytes.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_userblock(int plist, long size) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_userblock retrieves the size of a user block in a file creation property list.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param size OUT: Pointer to location to return user-block size.
+     * @return a non-negative value and the size of the user block, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     */
+    public static native int H5Pget_userblock(int plist, long[] size) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Pset_small_data_block_size reserves blocks of size bytes for the contiguous storage of the
+     * raw data portion of small datasets.
+     * 
+     * @param plist IN: Identifier of property list to modify.
+     * @param size IN: Size of the blocks in bytes.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_small_data_block_size(int plist, long size)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Pget_small_data_block_size retrieves the size of a block of small data in a file creation
+     * property list.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param size OUT: Pointer to location to return block size.
+     * @return a non-negative value and the size of the block, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     */
+    public static native int H5Pget_small_data_block_size(int plist, long[] size)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Pset_sizes sets the byte size of the offsets and lengths used to address objects in an HDF5
+     * file.
+     * 
+     * @param plist IN: Identifier of property list to modify.
+     * @param sizeof_addr IN: Size of an object offset in bytes.
+     * @param sizeof_size IN: Size of an object length in bytes.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_sizes(int plist, int sizeof_addr, int sizeof_size)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Pget_sizes retrieves the size of the offsets and lengths used in an HDF5 file. This
+     * function is only valid for file creation property lists.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param size OUT: the size of the offsets and length.
+     * 
+     *            <pre>
+     *            size[0] = sizeof_addr // offset size in bytes
+     *            size[1] = sizeof_size // length size in bytes
+     *            </pre>
+     * @return a non-negative value with the sizes initialized, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     * @exception IllegalArgumentException - size is invalid.
+     */
+    public static native int H5Pget_sizes(int plist, int[] size) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Pset_sym_k sets the size of parameters used to control the symbol table nodes.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param ik IN: Symbol table tree rank.
+     * @param lk IN: Symbol table node size.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_sym_k(int plist, int ik, int lk) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_sym_k retrieves the size of the symbol table B-tree 1/2 rank and the symbol table leaf
+     * node 1/2 size.
+     * 
+     * @param plist IN: Property list to query.
+     * @param size OUT: the symbol table's B-tree 1/2 rank and leaf node 1/2 size.
+     * 
+     *            <pre>
+     *            size[0] = ik // the symbol table's B-tree 1/2 rank
+     *            size[1] = lk // leaf node 1/2 size
+     *            </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     * @exception IllegalArgumentException - size is invalid.
+     */
+    public static native int H5Pget_sym_k(int plist, int[] size) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Pset_istore_k sets the size of the parameter used to control the B-trees for indexing
+     * chunked datasets.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param ik IN: 1/2 rank of chunked storage B-tree.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_istore_k(int plist, int ik) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_istore_k queries the 1/2 rank of an indexed storage B-tree.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param ik OUT: Pointer to location to return the chunked storage B-tree 1/2 rank.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - ik array is null.
+     */
+    public static native int H5Pget_istore_k(int plist, int[] ik) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Pset_layout sets the type of storage used to store the raw data for a dataset.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param layout IN: Type of storage layout for raw data.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_layout(int plist, int layout) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_layout returns the layout of the raw data for a dataset.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @return the layout type of a dataset creation property list if successful. Otherwise returns
+     *         H5D_LAYOUT_ERROR (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pget_layout(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pset_chunk sets the size of the chunks used to store a chunked layout dataset.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param ndims IN: The number of dimensions of each chunk.
+     * @param dim IN: An array containing the size of each chunk.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims array is null.
+     * @exception IllegalArgumentException - dims <=0
+     */
+    public static native int H5Pset_chunk(int plist, int ndims, byte[] dim)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    public static int H5Pset_chunk(final int plist, final int ndims, final long[] dim)
+            throws HDF5Exception, NullPointerException, IllegalArgumentException
+    {
+        if (dim == null)
+        {
+            return -1;
+        }
+
+        final byte[] thedims = HDFNativeData.longToByte(dim);
+
+        return H5Pset_chunk(plist, ndims, thedims);
+    }
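+
+    // Usage sketch (editor's illustration): configure a chunked layout on a dataset
+    // creation property list using the long[] convenience overload above. The 64x64
+    // chunk size is a hypothetical choice.
+    //
+    //   final int dcpl = H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+    //   H5Pset_layout(dcpl, HDF5Constants.H5D_CHUNKED);
+    //   H5Pset_chunk(dcpl, 2, new long[] { 64L, 64L });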
+
+    /**
+     * H5Pget_chunk retrieves the size of chunks for the raw data of a chunked layout dataset.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param max_ndims IN: Size of the dims array.
+     * @param dims OUT: Array to store the chunk dimensions.
+     * @return chunk dimensionality if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims array is null.
+     * @exception IllegalArgumentException - max_ndims <=0
+     */
+    public static native int H5Pget_chunk(int plist, int max_ndims, long[] dims)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Pset_alignment sets the alignment properties of a file access property list so that any
+     * file object >= THRESHOLD bytes will be aligned on an address which is a multiple of
+     * ALIGNMENT.
+     * 
+     * @param plist IN: Identifier for a file access property list.
+     * @param threshold IN: Threshold value.
+     * @param alignment IN: Alignment value.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_alignment(int plist, long threshold, long alignment)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Pget_alignment retrieves the current settings for alignment properties from a file access
+     * property list.
+     * 
+     * @param plist IN: Identifier of a file access property list.
+     * @param alignment OUT: threshold value and alignment value.
+     * 
+     *            <pre>
+     *            alignment[0] = threshold // threshold value
+     *            alignment[1] = alignment // alignment value
+     *            </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - alignment array is null.
+     * @exception IllegalArgumentException - alignment array is invalid.
+     */
+    public static native int H5Pget_alignment(int plist, long[] alignment)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Pset_external adds an external file to the list of external files.
+     * 
+     * @param plist IN: Identifier of a dataset creation property list.
+     * @param name IN: Name of an external file.
+     * @param offset IN: Offset, in bytes, from the beginning of the file to the location in the
+     *            file where the data starts.
+     * @param size IN: Number of bytes reserved in the file for the data.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Pset_external(int plist, String name, long offset, long size)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Pget_external_count returns the number of external files for the specified dataset.
+     * 
+     * @param plist IN: Identifier of a dataset creation property list.
+     * @return the number of external files if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pget_external_count(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_external returns information about an external file.
+     * 
+     * @param plist IN: Identifier of a dataset creation property list.
+     * @param idx IN: External file index.
+     * @param name_size IN: Maximum length of name array.
+     * @param name OUT: Name of the external file.
+     * @param size OUT: the offset value and the size of the external file data.
+     * 
+     *            <pre>
+     *            size[0] = offset // a location to return an offset value
+     *            size[1] = size   // a location to return the size of the external file data
+     *            </pre>
+     * @return a non-negative value if successful
+     * @exception ArrayIndexOutOfBoundsException Fatal error on Copyback
+     * @exception ArrayStoreException Fatal error on Copyback
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name or size is null.
+     * @exception IllegalArgumentException - name_size <= 0 .
+     */
+    public static native int H5Pget_external(int plist, int idx, int name_size, String[] name,
+            long[] size) throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+            HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Pset_fill_value sets the fill value for a dataset creation property list.
+     * 
+     * @param plist_id IN: Property list identifier.
+     * @param type_id IN: The datatype identifier of value.
+     * @param value IN: The fill value.
+     * @return a non-negative value if successful
+     * @exception HDF5Exception - Error converting data array
+     */
+    public static native int H5Pset_fill_value(int plist_id, int type_id, byte[] value)
+            throws HDF5Exception;
+
+    /**
+     * H5Pget_fill_value queries the fill value property of a dataset creation property list. <b>NOT
+     * IMPLEMENTED YET</b>
+     * 
+     * @param plist_id IN: Property list identifier.
+     * @param type_id IN: The datatype identifier of value.
+     * @param value OUT: The fill value.
+     * @return a non-negative value if successful
+     */
+    public static native int H5Pget_fill_value(int plist_id, int type_id, byte[] value)
+            throws HDF5Exception;
+
+    /**
+     * H5Pset_filter adds the specified filter and corresponding properties to the end of an output
+     * filter pipeline.
+     * 
+     * @param plist IN: Property list identifier.
+     * @param filter IN: Filter to be added to the pipeline.
+     * @param flags IN: Bit vector specifying certain general properties of the filter.
+     * @param cd_nelmts IN: Number of elements in cd_values
+     * @param cd_values IN: Auxiliary data for the filter.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_filter(int plist, int filter, int flags, int cd_nelmts,
+            int[] cd_values) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_nfilters returns the number of filters defined in the filter pipeline associated with
+     * the property list plist.
+     * 
+     * @param plist IN: Property list identifier.
+     * @return the number of filters in the pipeline if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pget_nfilters(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_filter returns information about a filter, specified by its filter number, in a filter
+     * pipeline, specified by the property list with which it is associated.
+     * 
+     * @param plist IN: Property list identifier.
+     * @param filter_number IN: Sequence number within the filter pipeline of the filter for which
+     *            information is sought.
+     * @param flags OUT: Bit vector specifying certain general properties of the filter.
+     * @param cd_nelmts IN/OUT: Number of elements in cd_values
+     * @param cd_values OUT: Auxiliary data for the filter.
+     * @param namelen IN: Anticipated number of characters in name.
+     * @param name OUT: Name of the filter.
+     * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR
+     *         (-1).
+     * @exception ArrayIndexOutOfBoundsException Fatal error on Copyback
+     * @exception ArrayStoreException Fatal error on Copyback
+     * @exception NullPointerException - name or an array is null.
+     */
+    public static native int H5Pget_filter(int plist, int filter_number, int[] flags,
+            int[] cd_nelmts, int[] cd_values, int namelen, String[] name)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException;
+
+    // REMOVED in HDF5.1.4
+    //
+    // /**
+    // * H5Pget_driver returns the identifier of the low-level
+    // * file driver.
+    // * <p>
+    // * Valid identifiers are:
+    // * <UL>
+    // * <LI>
+    // * H5F_LOW_STDIO (0)
+    // * </LI>
+    // * <LI>
+    // * H5F_LOW_SEC2 (1)
+    // * </LI>
+    // * <LI>
+    // * H5F_LOW_MPIO (2)
+    // * </LI>
+    // * <LI>
+    // * H5F_LOW_CORE (3)
+    // * </LI>
+    // * <LI>
+    // * H5F_LOW_SPLIT (4)
+    // * </LI>
+    // * <LI>
+    // * H5F_LOW_FAMILY (5)
+    // * </LI>
+    // * </UL>
+    // *
+    // * @param plist IN: Identifier of a file access property list.
+    // *
+    // * @return a low-level driver identifier if successful. Otherwise returns
+    // * H5F_LOW_ERROR (-1).
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // **/
+    // public static native int H5Pget_driver(int plist)
+    // throws HDF5LibraryException;
+    //
+    // /**
+    // * H5Pset_stdio sets the low level file driver to use the
+    // * functions declared in the stdio.h file: fopen(), fseek()
+    // * or fseek64(), fread(), fwrite(), and fclose().
+    // *
+    // * @param plist IN: Identifier of a file access property list.
+    // *
+    // * @return a non-negative value if successful
+    // *
+    // **/
+    // public static native int H5Pset_stdio(int plist)
+    // throws HDF5LibraryException;
+    //
+    // /**
+    // * H5Pget_stdio checks to determine whether the file access
+    // * property list is set to the stdio driver.
+    // *
+    // * @param plist IN: Identifier of a file access property list.
+    // * @return true if the file access property list is set to
+    // * the stdio driver. Otherwise returns a negative value.
+    // *
+    // **/
+    // public static native boolean H5Pget_stdio(int plist);
+    //
+    // /**
+    // * H5Pset_sec2 sets the low-level file driver to use the
+    // * functions declared in the unistd.h file: open(), lseek()
+    // * or lseek64(), read(), write(), and close().
+    // *
+    // * @param plist IN: Identifier of a file access property list.
+    // * @return a non-negative value if successful
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // **/
+    // public static native int H5Pset_sec2(int plist)
+    // throws HDF5LibraryException;
+    //
+    //
+    // /**
+    // * H5Pget_sec2 checks to determine whether the file access
+    // * property list is set to the sec2 driver.
+    // *
+    // * @param plist IN: Identifier of a file access property list.
+    // * @return true if the file access property list is set to
+    // * the sec2 driver. Otherwise returns a negative value.
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // **/
+    // public static native boolean H5Pget_sec2(int plist)
+    // throws HDF5LibraryException;
+    //
+    // /**
+    // * H5Pset_core sets the low-level file driver to use malloc() and
+    // * free().
+    // *
+    // * @param plist IN: Identifier of a file access property list.
+    // * @param increment IN: File block size in bytes.
+    // *
+    // * @return a non-negative value if successful
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // **/
+    // public static native int H5Pset_core(int plist, int increment)
+    // throws HDF5LibraryException;
+    //
+    //
+    // /**
+    // * H5Pget_core checks to determine whether the file access
+    // * property list is set to the core driver.
+    // *
+    // * @param plist IN: Identifier of the file access property list.
+    // * @param increment OUT: A location to return the file block size
+    // * @return true if the file access property list is set to
+    // * the core driver.
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // **/
+    // public static native boolean H5Pget_core(int plist,
+    // int[] increment)
+    // throws HDF5LibraryException;
+    //
+    //
+    // /**
+    // * H5Pset_split sets the low-level driver to split meta data
+    // * from raw data, storing meta data in one file and raw data
+    // * in another file.
+    // *
+    // * @param plist IN: Identifier of the file access property list.
+    // * @param meta_ext IN: Name of the extension for the metafile
+    // * filename. Recommended default value: <i>.meta</i>.
+    // * @param meta_plist IN: Identifier of the meta file access
+    // * property list.
+    // * @param raw_ext IN: Name extension for the raw file filename.
+    // * Recommended default value: <i>.raw</i>.
+    // * @param raw_plist IN: Identifier of the raw file access
+    // * property list.
+    // * @return a non-negative value if successful
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // * @exception NullPointerException - a string is null.
+    // **/
+    // public static native int H5Pset_split(int plist, String meta_ext,
+    // int meta_plist, String raw_ext, int raw_plist)
+    // throws HDF5LibraryException,
+    // NullPointerException;
+    //
+    //
+    // /**
+    // * H5Pget_split checks to determine whether the file access
+    // * property list is set to the split driver.
+    // *
+    // * @param plist IN: Identifier of the file access property list.
+    // * @param meta_ext_size IN: Number of characters of the
+    // * meta file extension to be copied to the meta_ext buffer.
+    // * @param meta_ext IN: Meta file extension.
+    // * @param *meta_properties OUT: A copy of the meta file
+    // * access property list.
+    // * @param raw_ext_size IN: Number of characters of the
+    // * raw file extension to be copied to the raw_ext buffer.
+    // * @param raw_ext OUT: Raw file extension.
+    // * @param *raw_properties OUT: A copy of the raw file
+    // * access property list.
+    // *
+    // * @return true if the file access property list is set to
+    // * the split driver.
+    // *
+    // * @exception ArrayIndexOutOfBoundsException JNI error
+    // * writing back array
+    // * @exception ArrayStoreException JNI error writing back array
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // * @exception NullPointerException - a string or array is null.
+    // **/
+    // public static native boolean H5Pget_split(int plist,
+    // int meta_ext_size, String[] meta_ext,
+    // int[] meta_properties, int raw_ext_size,
+    // String[] raw_ext, int[] raw_properties)
+    // throws ArrayIndexOutOfBoundsException,
+    // ArrayStoreException,
+    // HDF5LibraryException,
+    // NullPointerException;
+    //
+    // /**
+    // * H5Pset_family sets the file access properties to use the
+    // * family driver; any previously defined driver properties
+    // * are erased from the property list.
+    // *
+    // * @param plist IN: Identifier of the file access property list.
+    // * @param memb_size IN: Logical size, in bytes, of each
+    // * family member.
+    // * @param memb_plist IN: Identifier of the file access
+    // * property list for each member of the family.
+    // * @return a non-negative value if successful
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // **/
+    // public static native int H5Pset_family(int plist, long memb_size,
+    // int memb_plist)
+    // throws HDF5LibraryException;
+    //
+    //
+    // /**
+    // * H5Pget_family checks to determine whether the file access
+    // * property list is set to the family driver.
+    // *
+    // * @param plist IN: Identifier of the file access property list.
+    // * @param memb_size OUT: Logical size, in bytes, of each
+    // * family member.
+    // * @param *memb_plist OUT: Identifier of the file access
+    // * property list for each member of the family.
+    // *
+    // * @return a non-negative value if the file access property
+    // * list is set to the family driver.
+    // *
+    // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+    // * @exception NullPointerException - an array is null.
+    // **/
+    // public static native int H5Pget_family(int tid, long[] memb_size,
+    // int[] memb_plist)
+    // throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Pset_cache sets the number of elements (objects) in the meta data cache and the total
+     * number of bytes in the raw data chunk cache.
+     * 
+     * @param plist IN: Identifier of the file access property list.
+     * @param mdc_nelmts IN: Number of elements (objects) in the meta data cache.
+     * @param rdcc_nelmts IN: Number of elements (objects) in the raw data chunk cache.
+     * @param rdcc_nbytes IN: Total size of the raw data chunk cache, in bytes.
+     * @param rdcc_w0 IN: Preemption policy.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_cache(int plist, int mdc_nelmts, int rdcc_nelmts,
+            int rdcc_nbytes, double rdcc_w0) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_cache retrieves the maximum possible number of elements in the meta data cache, and
+     * the maximum possible number of elements and bytes and the RDCC_W0 value in the raw data
+     * chunk cache.
+     * 
+     * @param plist IN: Identifier of the file access property list.
+     * @param mdc_nelmts IN/OUT: Number of elements (objects) in the meta data cache.
+     * @param rdcc_nelmts IN/OUT: Number of elements (objects) in the raw data chunk cache.
+     * @param rdcc_nbytes IN/OUT: Total size of the raw data chunk cache, in bytes.
+     * @param rdcc_w0 IN/OUT: Preemption policy.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an array is null.
+     */
+    public static native int H5Pget_cache(int plist, int[] mdc_nelmts, int[] rdcc_nelmts,
+            int[] rdcc_nbytes, double[] rdcc_w0) throws HDF5LibraryException, NullPointerException;
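+
+    // Usage sketch (editor's illustration): read the current cache settings through the
+    // one-element arrays required by the OUT parameters. fapl_id is hypothetical.
+    //
+    //   final int[] mdc_nelmts = new int[1];
+    //   final int[] rdcc_nelmts = new int[1];
+    //   final int[] rdcc_nbytes = new int[1];
+    //   final double[] rdcc_w0 = new double[1];
+    //   H5Pget_cache(fapl_id, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);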
+
+    /**
+     * H5Pset_buffer sets type conversion and background buffers. Given a
+     * dataset transfer property list, H5Pset_buffer sets the maximum size for the type conversion
+     * buffer and background buffer and optionally supplies pointers to application-allocated
+     * buffers. If the buffer size is smaller than the entire amount of data being transferred
+     * between the application and the file, and a type conversion buffer or background buffer is
+     * required, then strip mining will be used. Note that there are minimum size requirements for
+     * the buffer. Strip mining can only break the data up along the first dimension, so the buffer
+     * must be large enough to accommodate a complete slice that encompasses all of the remaining
+     * dimensions. For example, when strip mining a 100x200x300 hyperslab of a simple data space,
+     * the buffer must be large enough to hold 1x200x300 data elements. When strip mining a
+     * 100x200x300x150 hyperslab of a simple data space, the buffer must be large enough to hold
+     * 1x200x300x150 data elements. If tconv and/or bkg are null pointers, then buffers will be
+     * allocated and freed during the data transfer.
+     * 
+     * @param plist Identifier for the dataset transfer property list.
+     * @param size Size, in bytes, of the type conversion and background buffers.
+     * @param tconv byte array of application-allocated type conversion buffer.
+     * @param bkg byte array of application-allocated background buffer.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception IllegalArgumentException - plist is invalid.
+     */
+    public static native int H5Pset_buffer(int plist, int size, byte[] tconv, byte[] bkg)
+            throws HDF5LibraryException, IllegalArgumentException;
+
+    /**
+     * H5Pget_buffer gets type conversion and background buffers. Returns buffer size, in bytes, if
+     * successful; otherwise 0 on failure.
+     * 
+     * @param plist Identifier for the dataset transfer property list.
+     * @param tconv byte array of application-allocated type conversion buffer.
+     * @param bkg byte array of application-allocated background buffer.
+     * @return buffer size, in bytes, if successful; otherwise 0 on failure
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception IllegalArgumentException - plist is invalid.
+     */
+    public static native int H5Pget_buffer(int plist, byte[] tconv, byte[] bkg)
+            throws HDF5LibraryException, IllegalArgumentException;
+
+    /**
+     * H5Pset_preserve sets the dataset transfer property list status to TRUE or FALSE.
+     * 
+     * @param plist IN: Identifier for the dataset transfer property list.
+     * @param status IN: Status for the dataset transfer property list.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception IllegalArgumentException - plist is invalid.
+     */
+    public static native int H5Pset_preserve(int plist, boolean status)
+            throws HDF5LibraryException, IllegalArgumentException;
+
+    /**
+     * H5Pget_preserve checks the status of the dataset transfer property list.
+     * 
+     * @param plist IN: Identifier for the dataset transfer property list.
+     * @return TRUE or FALSE if successful; otherwise returns a negative value
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pget_preserve(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pset_deflate sets the compression method for a dataset.
+     * 
+     * @param plist IN: Identifier for the dataset creation property list.
+     * @param level IN: Compression level.
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_deflate(int plist, int level) throws HDF5LibraryException;
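+
+    // Usage sketch (editor's illustration): deflate compression applies to chunked
+    // datasets, so it is combined with H5Pset_chunk on a dataset creation property
+    // list. Compression level 6 is a hypothetical middle-of-the-road choice.
+    //
+    //   final int dcpl = H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+    //   H5Pset_chunk(dcpl, 2, new long[] { 64L, 64L });
+    //   H5Pset_deflate(dcpl, 6);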
+
+    /**
+     * H5Pset_nbit sets the compression method for a dataset to n-bits.
+     * <p>
+     * Keeps only n-bits from an integer or float value.
+     * 
+     * @param plist IN: Identifier for the dataset creation property list.
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_nbit(int plist) throws HDF5LibraryException;
+
+    /**
+     * H5Pset_scaleoffset sets the compression method for a dataset to scale_offset.
+     * <p>
+     * Generally speaking, Scale-Offset compression performs a scale and/or offset operation on each
+     * data value and truncates the resulting value to a minimum number of bits (MinBits) before
+     * storing it. The current Scale-Offset filter supports integer and floating-point datatypes.
+     * 
+     * @param plist IN: Identifier for the dataset creation property list.
+     * @param scale_type IN: One of {@link HDF5Constants#H5Z_SO_INT},
+     *            {@link HDF5Constants#H5Z_SO_FLOAT_DSCALE} or
+     *            {@link HDF5Constants#H5Z_SO_FLOAT_ESCALE}. Note that
+     *            {@link HDF5Constants#H5Z_SO_FLOAT_ESCALE} is not implemented as of HDF5 1.8.2.
+     * @param scale_factor IN: The scale factor; for integer datatypes the number of bits to
+     *            retain, for D-scale floating-point datatypes the decimal scale factor.
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_scaleoffset(int plist, int scale_type, int scale_factor)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Pset_gc_references sets the flag for garbage collecting references for the file. The
+     * default value for garbage collecting references is off.
+     * 
+     * @param fapl_id IN File access property list
+     * @param gc_ref IN set GC on (true) or off (false)
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_gc_references(int fapl_id, boolean gc_ref)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Pget_gc_references returns the current setting for the garbage collection references
+     * property from a file access property list.
+     * <p>
+     * Note: this routine changed name with HDF5.1.2.2. If using an earlier version, use 'configure
+     * --enable-hdf5_1_2_1' so this routine will link to the old name.
+     * 
+     * @param fapl_id IN File access property list
+     * @param gc_ref OUT GC is on (true) or off (false)
+     * @return non-negative if succeed
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     */
+    public static native int H5Pget_gc_references(int fapl_id, boolean[] gc_ref)
+            throws HDF5LibraryException, NullPointerException;
+
+    /*
+     * Earlier versions of the HDF5 library had a different name. This is included as an alias.
+     */
+    public static int H5Pget_gc_reference(final int fapl_id, final boolean[] gc_ref)
+            throws HDF5LibraryException, NullPointerException
+    {
+        return H5Pget_gc_references(fapl_id, gc_ref);
+    }
+
+    /**
+     * H5Pset_btree_ratios sets B-tree split ratios for a dataset transfer property list. The split
+     * ratios determine what percent of children go in the first node when a node splits.
+     * 
+     * @param plist_id IN Dataset transfer property list
+     * @param left IN split ratio for leftmost nodes
+     * @param right IN split ratio for rightmost nodes
+     * @param middle IN split ratio for all other nodes
+     * @return non-negative if succeed
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Pset_btree_ratios(int plist_id, double left, double middle,
+            double right) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_btree_ratios gets the B-tree split ratios for a dataset transfer property list.
+     * 
+     * @param plist_id IN Dataset transfer property list
+     * @param left OUT split ratio for leftmost nodes
+     * @param right OUT split ratio for rightmost nodes
+     * @param middle OUT split ratio for all other nodes
+     * @return non-negative if succeed
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     */
+    public static native int H5Pget_btree_ratios(int plist_id, double[] left, double[] middle,
+            double[] right) throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Pset_create_intermediate_group specifies in the property list whether to create missing
+     * intermediate groups.
+     * <p>
+     * H5Pset_create_intermediate_group specifies whether to set the link creation property list
+     * lcpl_id so that calls to functions that create objects in groups different from the current
+     * working group will create intermediate groups that may be missing in the path of a new or
+     * moved object.
+     * <p>
+     * Functions that create objects in or move objects to a group other than the current working
+     * group make use of this property. H5Gcreate_anon and H5Lmove are examples of such functions.
+     * <p>
+     * If crt_intermed_group is <code>true</code>, the H5G_CRT_INTMD_GROUP will be added to lcpl_id
+     * (if it is not already there). Missing intermediate groups will be created upon calls to
+     * functions such as those listed above that use lcpl_id.
+     * <p>
+     * If crt_intermed_group is <code>false</code>, the H5G_CRT_INTMD_GROUP, if present, will be
+     * removed from lcpl_id. Missing intermediate groups will not be created upon calls to functions
+     * such as those listed above that use lcpl_id.
+     * 
+     * @param lcpl_id Link creation property list identifier
+     * @param crt_intermed_group Flag specifying whether to create intermediate groups upon the
+     *            creation of an object
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native void H5Pset_create_intermediate_group(int lcpl_id,
+            boolean crt_intermed_group) throws HDF5LibraryException;
+
+    /**
+     * Determines whether the property is set to enable creating missing intermediate groups.
+     * 
+     * @return <code>true</code> if intermediate groups are created, <code>false</code> otherwise.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native boolean H5Pget_create_intermediate_group(int lcpl_id)
+            throws HDF5LibraryException;
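+
+    // Usage sketch (editor's illustration): request creation of missing intermediate
+    // groups on a link creation property list. HDF5Constants.H5P_LINK_CREATE is assumed
+    // to be available.
+    //
+    //   final int lcpl = H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+    //   H5Pset_create_intermediate_group(lcpl, true);
+    //   assert H5Pget_create_intermediate_group(lcpl);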
+
+    /**
+     * Returns a dataset transfer property list (<code>H5P_DATASET_XFER</code>) that has a
+     * conversion exception handler set which aborts conversions that trigger overflows.
+     */
+    public static native int H5Pcreate_xfer_abort_overflow();
+
+    /**
+     * Returns a dataset transfer property list (<code>H5P_DATASET_XFER</code>) that has a
+     * conversion exception handler set which aborts all conversions.
+     */
+    public static native int H5Pcreate_xfer_abort();
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5R: Reference Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    private static native int H5Rcreate(byte[] ref, int loc_id, String name, int ref_type,
+            int space_id) throws HDF5LibraryException, NullPointerException,
+            IllegalArgumentException;
+
+    /**
+     * H5Rcreate creates the reference, ref, of the type specified in ref_type, pointing to the
+     * object name located at loc_id.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param name IN: Name of object at location loc_id.
+     * @param ref_type IN: Type of reference.
+     * @param space_id IN: Dataspace identifier with selection.
+     * @return the reference (byte[]) if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */
+    public static byte[] H5Rcreate(final int loc_id, final String name, final int ref_type,
+            final int space_id) throws HDF5LibraryException, NullPointerException,
+            IllegalArgumentException
+    {
+        /* These sizes are correct for HDF5.1.2 */
+        int ref_size = 8;
+        if (ref_type == HDF5Constants.H5R_DATASET_REGION)
+        {
+            ref_size = 12;
+        }
+        final byte rbuf[] = new byte[ref_size];
+
+        /* will raise an exception if fails */
+        H5Rcreate(rbuf, loc_id, name, ref_type, space_id);
+
+        return rbuf;
+    }
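+
+    // Usage sketch (editor's illustration): create an object reference and resolve it
+    // back to an identifier. file_id and the path are hypothetical; -1 is passed for
+    // space_id because an object reference needs no dataspace selection.
+    //
+    //   final byte[] ref = H5Rcreate(file_id, "/data/set1", HDF5Constants.H5R_OBJECT, -1);
+    //   final int obj_id = H5Rdereference(file_id, HDF5Constants.H5R_OBJECT, ref);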
+
+    /**
+     * H5Rcreate creates the object references, pointing to the object names located at loc_id.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param name IN: Names of objects at location loc_id.
+     * @return the reference (long[]) if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */
+    public static native long[] H5Rcreate(final int loc_id, final String[] name)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * Given a reference to some object, H5Rdereference opens that object and returns an identifier.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param ref_type IN: The reference type of ref.
+     * @param ref IN: reference to an object
+     * @return valid identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - output array is null.
+     * @exception IllegalArgumentException - output array is invalid.
+     */
+    public static native int H5Rdereference(int loc_id, int ref_type, byte[] ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * Given a reference to some object, H5Rdereference opens that object and returns an identifier.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param ref IN: reference to an object
+     * @return valid identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - output array is null.
+     * @exception IllegalArgumentException - output array is invalid.
+     */
+    public static native int H5Rdereference(int loc_id, long ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * Given a reference to an object ref, H5Rget_region creates a copy of the dataspace of the
+     * dataset pointed to and defines a selection in the copy which is the region pointed to.
+     * 
+     * @param loc_id IN: loc_id of the reference object.
+     * @param ref_type IN: The reference type of ref.
+     * @param ref OUT: the reference to the object and region
+     * @return a valid identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - output array is null.
+     * @exception IllegalArgumentException - output array is invalid.
+     */
+    public static native int H5Rget_region(int loc_id, int ref_type, byte[] ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * Given a reference to an object, H5Rget_obj_type returns the type of the object pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref_type Type of reference to query.
+     * @param ref The reference.
+     * @return the object type if successful; otherwise a negative value is returned to signal
+     *         failure.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static native int H5Rget_obj_type(int loc_id, int ref_type, byte[] ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * Given a reference to an object, H5Rget_name returns the name (path) of the object pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref_type Type of reference to query.
+     * @param ref The reference.
+     * @return The path of the object being pointed to, or an empty string, if the object being
+     *         pointed to has no name.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static native String H5Rget_name(int loc_id, int ref_type, byte[] ref);
+
+    /**
+     * Given a reference to an object, H5Rget_name returns the name (path) of the object pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref The reference.
+     * @return The path of the object being pointed to, or an empty string, if the object being
+     *         pointed to has no name.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static native String H5Rget_name(int loc_id, long ref);
+
+    /**
+     * Given an array of object references (ref), H5Rget_name returns the names (paths) of the
+     * objects pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref The references.
+     * @return The paths of the objects being pointed to, or an empty string, if an object being
+     *         pointed to has no name.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static native String[] H5Rget_name(int loc_id, long[] ref);
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5S: Dataspace Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Screate creates a new dataspace of a particular type.
+     * 
+     * @param type The type of dataspace to be created.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Screate(int type) throws HDF5LibraryException;
+
+    /**
+     * H5Screate_simple creates a new simple data space and opens it for access.
+     * 
+     * @param rank Number of dimensions of dataspace.
+     * @param dims An array of the size of each dimension.
+     * @param maxdims An array of the maximum size of each dimension.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims or maxdims is null.
+     */
+    public static native int H5Screate_simple(int rank, byte[] dims, byte[] maxdims)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static int H5Screate_simple(final int rank, final long[] dims, final long[] maxdims)
+            throws HDF5Exception, NullPointerException
+    {
+        if (dims == null)
+        {
+            return -1;
+        }
+
+        final byte[] dimsAsByteArray = HDFNativeData.longToByte(dims);
+        final byte[] maxDimsAsByteArray =
+                (maxdims != null) ? HDFNativeData.longToByte(maxdims) : null;
+
+        return H5Screate_simple(rank, dimsAsByteArray, maxDimsAsByteArray);
+    }
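+
+    // Usage sketch (editor's illustration): create a 2-dimensional dataspace with the
+    // long[] convenience overload above. Passing null for maxdims makes the maximum
+    // sizes equal to the current sizes; the dimensions are hypothetical.
+    //
+    //   final int space_id = H5Screate_simple(2, new long[] { 100L, 200L }, null);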
+
+    /**
+     * H5Scopy creates a new dataspace which is an exact copy of the dataspace identified by
+     * space_id.
+     * 
+     * @param space_id Identifier of dataspace to copy.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Scopy(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sselect_elements selects array elements to be included in the selection for the space_id
+     * dataspace.
+     * 
+     * @param space_id Identifier of the dataspace.
+     * @param op operator specifying how the new selection is combined.
+     * @param num_elements Number of elements to be selected.
+     * @param coord A 2-dimensional array specifying the coordinates of the elements.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sselect_elements(int space_id, int op, int num_elements, byte[] coord)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Sselect_all selects the entire extent of the dataspace space_id.
+     * 
+     * @param space_id IN: The identifier of the dataspace to be selected.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sselect_all(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sselect_none resets the selection region for the dataspace space_id to include no elements.
+     * 
+     * @param space_id IN: The identifier of the dataspace to be reset.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sselect_none(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sselect_valid verifies that the selection of the dataspace is contained within the extent
+     * of the dataspace.
+     * 
+     * @param space_id The identifier for the dataspace being queried.
+     * @return true if the selection is contained within the extent, and false if it is not or an
+     *         error occurred.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native boolean H5Sselect_valid(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sget_simple_extent_npoints determines the number of elements in a dataspace.
+     * 
+     * @param space_id ID of the dataspace object to query
+     * @return the number of elements in the dataspace if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Sget_simple_extent_npoints(int space_id)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Sget_select_npoints determines the number of elements in the current selection of a
+     * dataspace.
+     * 
+     * @param space_id Dataspace identifier.
+     * @return the number of elements in the selection if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Sget_select_npoints(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sget_simple_extent_ndims determines the dimensionality (or rank) of a dataspace.
+     * 
+     * @param space_id Identifier of the dataspace
+     * @return the number of dimensions in the dataspace if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sget_simple_extent_ndims(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sget_simple_extent_dims returns the size and maximum sizes of each dimension of a dataspace
+     * through the dims and maxdims parameters.
+     * 
+     * @param space_id IN: Identifier of the dataspace object to query
+     * @param dims OUT: Pointer to array to store the size of each dimension.
+     * @param maxdims OUT: Pointer to array to store the maximum size of each dimension.
+     * @return the number of dimensions in the dataspace if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims or maxdims is null.
+     */
+    public static native int H5Sget_simple_extent_dims(int space_id, long[] dims, long[] maxdims)
+            throws HDF5LibraryException, NullPointerException;
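+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): query
+    // the rank first so that the dims/maxdims buffers can be sized correctly before
+    // calling H5Sget_simple_extent_dims.
+    private static long[] exampleGetDims(final int spaceId) throws HDF5LibraryException
+    {
+        final int rank = H5Sget_simple_extent_ndims(spaceId);
+        final long[] dims = new long[rank];
+        final long[] maxdims = new long[rank];
+        H5Sget_simple_extent_dims(spaceId, dims, maxdims);
+        return dims;
+    }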
+
+    /**
+     * H5Sget_simple_extent_type queries a dataspace to determine the current class of a dataspace.
+     * 
+     * @param space_id Dataspace identifier.
+     * @return a dataspace class constant if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sget_simple_extent_type(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sset_extent_simple sets or resets the size of an existing dataspace.
+     * 
+     * @param space_id Dataspace identifier.
+     * @param rank Rank, or dimensionality, of the dataspace.
+     * @param current_size Array containing current size of dataspace.
+     * @param maximum_size Array containing maximum size of dataspace.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sset_extent_simple(int space_id, int rank, byte[] current_size,
+            byte[] maximum_size) throws HDF5LibraryException, NullPointerException;
+
+    public static int H5Sset_extent_simple(final int space_id, final int rank,
+            final long[] currentSize, final long[] maxSize) throws HDF5Exception,
+            NullPointerException
+    {
+        if (currentSize == null)
+        {
+            return -1;
+        }
+
+        final byte[] currentSizeAsByteArray = HDFNativeData.longToByte(currentSize);
+        final byte[] maxSizeAsByteArray =
+                (maxSize != null) ? HDFNativeData.longToByte(maxSize) : null;
+
+        return H5Sset_extent_simple(space_id, rank, currentSizeAsByteArray, maxSizeAsByteArray);
+    }
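+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): resize
+    // a rank-1 dataspace to a new current size with an unlimited maximum, assuming the
+    // usual HDF5Constants.H5S_UNLIMITED constant is available.
+    private static void exampleGrowDataspace(final int spaceId, final long newSize)
+            throws HDF5Exception
+    {
+        final long[] size = new long[] { newSize };
+        final long[] maxSize = new long[] { HDF5Constants.H5S_UNLIMITED };
+        H5Sset_extent_simple(spaceId, 1, size, maxSize);
+    }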
+
+    /**
+     * H5Sis_simple determines whether a dataspace is a simple dataspace.
+     * 
+     * @param space_id Identifier of the dataspace to query
+     * @return true if it is a simple dataspace
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native boolean H5Sis_simple(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Soffset_simple sets the offset of a simple dataspace space_id.
+     * 
+     * @param space_id IN: The identifier for the dataspace object to reset.
+     * @param offset IN: The offset at which to position the selection.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - offset array is null.
+     */
+    public static native int H5Soffset_simple(int space_id, byte[] offset)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static int H5Soffset_simple(final int space_id, final long[] offset)
+            throws HDF5Exception, NullPointerException
+    {
+        if (offset == null)
+        {
+            return -1;
+        }
+
+        final byte[] offsetAsByteArray = HDFNativeData.longToByte(offset);
+
+        return H5Soffset_simple(space_id, offsetAsByteArray);
+    }
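+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): shift
+    // the selection of a 2-D dataspace and use H5Sselect_valid to check that the shifted
+    // selection still fits within the extent.
+    private static boolean exampleShiftSelection(final int spaceId, final long rowOffset,
+            final long colOffset) throws HDF5Exception
+    {
+        final long[] offset = new long[] { rowOffset, colOffset };
+        H5Soffset_simple(spaceId, offset);
+        return H5Sselect_valid(spaceId);
+    }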
+
+    /**
+     * H5Sextent_copy copies the extent from source_space_id to dest_space_id. This action may
+     * change the type of the dataspace.
+     * 
+     * @param dest_space_id IN: The identifier for the dataspace to which the extent is copied.
+     * @param source_space_id IN: The identifier for the dataspace from which the extent is copied.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sextent_copy(int dest_space_id, int source_space_id)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Sset_extent_none removes the extent from a dataspace and sets the type to H5S_NONE.
+     * 
+     * @param space_id The identifier for the dataspace from which the extent is to be removed.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sset_extent_none(int space_id) throws HDF5LibraryException;
+
+    /**
+     * H5Sselect_hyperslab selects a hyperslab region to add to the current selected region for the
+     * dataspace specified by space_id. The start, stride, count, and block arrays must be the same
+     * size as the rank of the dataspace.
+     * 
+     * @param space_id IN: Identifier of dataspace selection to modify
+     * @param op IN: Operation to perform on current selection.
+     * @param start IN: Offset of start of hyperslab
+     * @param stride IN: Hyperslab stride.
+     * @param count IN: Number of blocks included in hyperslab.
+     * @param block IN: Size of block in hyperslab.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */
+    public static native int H5Sselect_hyperslab(int space_id, int op, byte[] start, byte[] stride,
+            byte[] count, byte[] block) throws HDF5LibraryException, NullPointerException,
+            IllegalArgumentException;
+
+    public static int H5Sselect_hyperslab(final int space_id, final int op, final long[] start,
+            final long[] stride, final long[] count, final long[] block) throws HDF5Exception,
+            NullPointerException, IllegalArgumentException
+    {
+        final byte[] startAsByteArray = (start != null) ? HDFNativeData.longToByte(start) : null;
+        final byte[] countAsByteArray = (count != null) ? HDFNativeData.longToByte(count) : null;
+        final byte[] strideAsByteArray = (stride != null) ? HDFNativeData.longToByte(stride) : null;
+        final byte[] blockAsByteArray = (block != null) ? HDFNativeData.longToByte(block) : null;
+
+        return H5Sselect_hyperslab(space_id, op, startAsByteArray, strideAsByteArray,
+                countAsByteArray, blockAsByteArray);
+    }
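+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): select
+    // a contiguous rows x cols block starting at (row, col) in a 2-D dataspace, assuming
+    // the usual HDF5Constants.H5S_SELECT_SET operator constant. Stride and block are set
+    // to 1 in every dimension, i.e. a plain contiguous region.
+    private static void exampleSelectBlock(final int spaceId, final long row, final long col,
+            final long rows, final long cols) throws HDF5Exception
+    {
+        final long[] start = new long[] { row, col };
+        final long[] count = new long[] { rows, cols };
+        final long[] ones = new long[] { 1, 1 };
+        H5Sselect_hyperslab(spaceId, HDF5Constants.H5S_SELECT_SET, start, ones, count, ones);
+    }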
+
+    /**
+     * H5Sclose releases a dataspace.
+     * 
+     * @param space_id Identifier of dataspace to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Sclose(int space_id) throws HDF5LibraryException;
+
+    // --//
+
+    // The following static native functions are missing from the HDF5 (version 1.0.1) Reference Manual.
+
+    /**
+     * H5Sget_select_hyper_nblocks returns the number of hyperslab blocks in the current dataspace
+     * selection.
+     * 
+     * @param spaceid Identifier of the dataspace to query.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Sget_select_hyper_nblocks(int spaceid) throws HDF5LibraryException;
+
+    /**
+     * H5Sget_select_elem_npoints returns the number of element points in the current dataspace
+     * selection.
+     * 
+     * @param spaceid Identifier of the dataspace to query.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Sget_select_elem_npoints(int spaceid) throws HDF5LibraryException;
+
+    /**
+     * H5Sget_select_hyper_blocklist returns an array of hyperslab blocks. The block coordinates
+     * have the same dimensionality (rank) as the dataspace they are located within. The list of
+     * blocks is formatted as follows:
+     * 
+     * <pre>
+     * 
+     * <"start" coordinate>, immediately followed by <"opposite" corner
+     * coordinate>, followed by the next "start" and "opposite" coordinates,
+     * etc. until all of the selected blocks have been listed.
+     * 
+     * </pre>
+     * 
+     * @param spaceid Identifier of the dataspace to query.
+     * @param startblock first block to retrieve
+     * @param numblocks number of blocks to retrieve
+     * @param buf returns blocks startblock to startblock+numblocks-1; each block is <i>rank</i> * 2
+     *            (corners) longs.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static native int H5Sget_select_hyper_blocklist(int spaceid, long startblock,
+            long numblocks, long[] buf) throws HDF5LibraryException, NullPointerException;
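+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): walk
+    // all hyperslab blocks of the current selection. Each block occupies 2 * rank longs
+    // in the buffer: the "start" corner followed by the "opposite" corner.
+    private static void exampleIterateBlocks(final int spaceId) throws HDF5LibraryException
+    {
+        final int rank = H5Sget_simple_extent_ndims(spaceId);
+        final long nblocks = H5Sget_select_hyper_nblocks(spaceId);
+        final long[] buf = new long[(int) nblocks * 2 * rank];
+        H5Sget_select_hyper_blocklist(spaceId, 0, nblocks, buf);
+        for (int b = 0; b < nblocks; ++b)
+        {
+            final int base = b * 2 * rank;
+            final long[] startCorner = java.util.Arrays.copyOfRange(buf, base, base + rank);
+            final long[] oppositeCorner =
+                    java.util.Arrays.copyOfRange(buf, base + rank, base + 2 * rank);
+            // ... process startCorner / oppositeCorner ...
+        }
+    }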
+
+    /**
+     * H5Sget_select_elem_pointlist returns an array of element points in the current dataspace
+     * selection. The point coordinates have the same dimensionality (rank) as the dataspace they
+     * are located within, one coordinate per point.
+     * 
+     * @param spaceid Identifier of the dataspace to query.
+     * @param startpoint first point to retrieve
+     * @param numpoints number of points to retrieve
+     * @param buf returns points startpoint to startpoint+numpoints-1; each point is <i>rank</i> longs.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static native int H5Sget_select_elem_pointlist(int spaceid, long startpoint,
+            long numpoints, long[] buf) throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Sget_select_bounds retrieves the coordinates of the bounding box containing the current
+     * selection and places them into user-supplied buffers.
+     * <P>
+     * The start and end buffers must be large enough to hold the dataspace rank number of
+     * coordinates.
+     * 
+     * @param spaceid Identifier of the dataspace to query.
+     * @param start coordinates of lowest corner of bounding box.
+     * @param end coordinates of highest corner of bounding box.
+     * @return a non-negative value if successful, with start and end initialized.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - start or end is null.
+     */
+    public static native int H5Sget_select_bounds(int spaceid, long[] start, long[] end)
+            throws HDF5LibraryException, NullPointerException;
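+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): the
+    // start/end buffers passed to H5Sget_select_bounds must hold one coordinate per
+    // dataspace dimension, so the rank is queried first.
+    private static long[][] exampleSelectionBounds(final int spaceId) throws HDF5LibraryException
+    {
+        final int rank = H5Sget_simple_extent_ndims(spaceId);
+        final long[] start = new long[rank];
+        final long[] end = new long[rank];
+        H5Sget_select_bounds(spaceId, start, end);
+        return new long[][] { start, end };
+    }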
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5T: Datatype Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Topen opens a named datatype at the location specified by loc_id and returns an identifier
+     * for the datatype.
+     * 
+     * @param loc_id A file, group, or datatype identifier.
+     * @param name A datatype name.
+     * @param access_plist_id Datatype access property list identifier.
+     * @return a named datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Topen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Tcommit commits a transient datatype (not immutable) to a file, turning it into a named
+     * datatype.
+     * 
+     * @param loc_id A file or group identifier.
+     * @param name A datatype name.
+     * @param type_id A datatype identifier.
+     * @param link_create_plist_id Link creation property list.
+     * @param dtype_create_plist_id Datatype creation property list.
+     * @param dtype_access_plist_id Datatype access property list.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tcommit(int loc_id, String name, int type_id,
+            int link_create_plist_id, int dtype_create_plist_id, int dtype_access_plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Tcommitted queries a type to determine whether the type specified by the type identifier is
+     * a named type or a transient type.
+     * 
+     * @param type Datatype identifier.
+     * @return true if the datatype is a named (committed) datatype
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native boolean H5Tcommitted(int type) throws HDF5LibraryException;
+
+    /**
+     * H5Tcreate creates a new datatype of the specified class with the specified number of bytes.
+     * 
+     * @param dclass Class of datatype to create.
+     * @param size The number of bytes in the datatype to create.
+     * @return datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tcreate(int dclass, int size) throws HDF5LibraryException;
+
+    /**
+     * H5Tcopy copies an existing datatype. The returned type is always transient and unlocked.
+     * 
+     * @param type_id Identifier of datatype to copy. Can be a datatype identifier, a predefined
+     *            datatype (defined in H5Tpublic.h), or a dataset Identifier.
+     * @return a datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tcopy(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tequal determines whether two datatype identifiers refer to the same datatype.
+     * 
+     * @param type_id1 Identifier of datatype to compare.
+     * @param type_id2 Identifier of datatype to compare.
+     * @return true if the datatype identifiers refer to the same datatype, else false.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native boolean H5Tequal(int type_id1, int type_id2) throws HDF5LibraryException;
+
+    /**
+     * H5Tlock locks the datatype specified by the type_id identifier, making it read-only and
+     * indestructible.
+     * 
+     * @param type_id Identifier of datatype to lock.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tlock(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_class returns the datatype class identifier.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return datatype class identifier if successful; otherwise H5T_NO_CLASS (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_class(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_size returns the size of a datatype in bytes as an int value.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the size of the datatype in bytes if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library, or if the size of the
+     *                datatype exceeds the range of an int
+     */
+    public static native int H5Tget_size(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_size returns the size of a datatype in bytes as a long value.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the size of the datatype in bytes if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Tget_size_long(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_size sets the total size in bytes, size, for an atomic datatype (this operation is not
+     * permitted on compound datatypes).
+     * 
+     * @param type_id Identifier of datatype to change size.
+     * @param size Size in bytes to modify datatype.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_size(int type_id, int size) throws HDF5LibraryException;
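+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): a
+    // fixed-length C string datatype of the given length is typically built by copying
+    // the predefined string type and then setting its size, assuming the usual
+    // HDF5Constants.H5T_C_S1 constant is available.
+    private static int exampleFixedStringType(final int length) throws HDF5LibraryException
+    {
+        final int stringTypeId = H5Tcopy(HDF5Constants.H5T_C_S1);
+        H5Tset_size(stringTypeId, length);
+        return stringTypeId; // The caller releases it with H5Tclose().
+    }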
+
+    /**
+     * H5Tget_order returns the byte order of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a byte order constant if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_order(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_order sets the byte ordering of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param order Byte ordering constant.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_order(int type_id, int order) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_precision returns the precision of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the number of significant bits if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_precision(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_precision sets the precision of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param precision Number of bits of precision for datatype.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_precision(int type_id, int precision)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Tget_offset retrieves the bit offset of the first significant bit.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a positive offset value if successful; otherwise 0.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_offset(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_offset sets the bit offset of the first significant bit.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param offset Offset of first significant bit.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_offset(int type_id, int offset) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_pad retrieves the padding type of the least and most-significant bit padding.
+     * 
+     * @param type_id IN: Identifier of datatype to query.
+     * @param pad OUT: locations to return least-significant and most-significant bit padding type.
+     * 
+     *            <pre>
+     *            pad[0] = lsb  // least-significant bit padding type
+     *            pad[1] = msb  // most-significant bit padding type
+     * </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - pad is null.
+     */
+    public static native int H5Tget_pad(int type_id, int[] pad) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Tset_pad sets the least and most-significant bits padding types.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param lsb Padding type for least-significant bits.
+     * @param msb Padding type for most-significant bits.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_pad(int type_id, int lsb, int msb) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_sign retrieves the sign type for an integer type.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid sign type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_sign(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_sign sets the sign property for an integer type.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param sign Sign type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_sign(int type_id, int sign) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_fields retrieves information about the locations of the various bit fields of a
+     * floating point datatype.
+     * 
+     * @param type_id IN: Identifier of datatype to query.
+     * @param fields OUT: location of size and bit-position.
+     * 
+     *            <pre>
+     *            fields[0] = spos   OUT: location to return the sign bit-position.
+     *            fields[1] = epos   OUT: location to return the exponent bit-position.
+     *            fields[2] = esize  OUT: location to return the size of the exponent in bits.
+     *            fields[3] = mpos   OUT: location to return the mantissa bit-position.
+     *            fields[4] = msize  OUT: location to return the size of the mantissa in bits.
+     * </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - fields is null.
+     * @exception IllegalArgumentException - fields array is invalid.
+     */
+    public static native int H5Tget_fields(int type_id, int[] fields) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Tset_fields sets the locations and sizes of the various floating point bit fields.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param spos Sign bit position.
+     * @param epos Exponent bit position.
+     * @param esize Size of exponent in bits.
+     * @param mpos Mantissa bit position.
+     * @param msize Size of mantissa in bits.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_fields(int type_id, int spos, int epos, int esize, int mpos,
+            int msize) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_ebias retrieves the exponent bias of a floating-point type.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the bias if successful; otherwise 0.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_ebias(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_ebias sets the exponent bias of a floating-point type.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param ebias Exponent bias value.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_ebias(int type_id, int ebias) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_norm retrieves the mantissa normalization of a floating-point datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid normalization type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_norm(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_norm sets the mantissa normalization of a floating-point datatype.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param norm Mantissa normalization type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_norm(int type_id, int norm) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_inpad retrieves the internal padding type for unused bits in floating-point datatypes.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid padding type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_inpad(int type_id) throws HDF5LibraryException;
+
+    /**
+     * If any internal bits of a floating point type are unused (that is, those significant bits
+     * which are not part of the sign, exponent, or mantissa), then H5Tset_inpad specifies how those
+     * bits are filled, according to the padding type inpad.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @param inpad Padding type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_inpad(int type_id, int inpad) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_cset retrieves the character set type of a string datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid character set type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_cset(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_cset sets the character set to be used.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @param cset Character set type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_cset(int type_id, int cset) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_strpad retrieves the string padding method for a string datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid string padding type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_strpad(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_strpad defines the storage mechanism for the string.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @param strpad String padding type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_strpad(int type_id, int strpad) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_nmembers retrieves the number of fields a compound datatype has.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the number of members the datatype has if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_nmembers(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_member_name retrieves the name of a field of a compound datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_idx Field index (0-based) of the field name to retrieve.
+     * @return a valid string if successful; otherwise null.
+     */
+    public static native String H5Tget_member_name(int type_id, int field_idx);
+
+    /**
+     * H5Tget_member_index retrieves the index of a field of a compound datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_name Field name of the field index to retrieve.
+     * @return if field is defined, the index; else negative.
+     */
+    public static native int H5Tget_member_index(int type_id, String field_name);
+
+    /**
+     * H5Tget_member_class returns the datatype class of the specified member.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_idx Field index (0-based) of the field whose class to retrieve.
+     * @return the datatype class of the field if successful;
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_member_class(int type_id, int field_idx)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Tget_member_type returns the datatype of the specified member.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_idx Field index (0-based) of the field type to retrieve.
+     * @return the identifier of a copy of the datatype of the field if successful;
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_member_type(int type_id, int field_idx)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Tget_member_offset returns the byte offset of the specified member of the compound
+     * datatype. This is the byte offset in the HDF-5 file/library, NOT the offset of any Java
+     * object which might be mapped to this data item.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param membno Field index (0-based) of the field type to retrieve.
+     * @return the offset of the member.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native long H5Tget_member_offset(int type_id, int membno)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Tinsert adds another member to the compound datatype type_id.
+     * 
+     * @param type_id Identifier of compound datatype to modify.
+     * @param name Name of the field to insert.
+     * @param offset Offset in memory structure of the field to insert.
+     * @param field_id Datatype identifier of the field to insert.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tinsert(int type_id, String name, long offset, int field_id)
+            throws HDF5LibraryException, NullPointerException;
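+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): build
+    // a compound datatype with an int and a double member, assuming the usual
+    // HDF5Constants.H5T_COMPOUND, H5T_NATIVE_INT and H5T_NATIVE_DOUBLE constants and a
+    // packed 4 + 8 byte layout.
+    private static int exampleCompoundType() throws HDF5LibraryException
+    {
+        final int compoundId = H5Tcreate(HDF5Constants.H5T_COMPOUND, 12);
+        H5Tinsert(compoundId, "count", 0, HDF5Constants.H5T_NATIVE_INT);
+        H5Tinsert(compoundId, "value", 4, HDF5Constants.H5T_NATIVE_DOUBLE);
+        return compoundId; // The caller releases it with H5Tclose().
+    }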
+
+    /**
+     * H5Tpack recursively removes padding from within a compound datatype to make it more efficient
+     * (space-wise) to store that data.
+     * <P>
+     * <b>WARNING:</b> This call only affects the C data; even if it succeeds, there may be no
+     * visible effect on Java objects.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tpack(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tclose releases a datatype.
+     * 
+     * @param type_id Identifier of datatype to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tclose(int type_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tenum_create creates a new enumeration datatype based on the specified base datatype,
+     * base_id, which must be an integer type.
+     * 
+     * @param base_id Identifier of the base datatype for the new enumeration datatype.
+     * @return an enumeration datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tenum_create(int base_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tenum_insert inserts a new enumeration datatype member into an 8-bit enumeration datatype.
+     * 
+     * @param type Identifier of datatype.
+     * @param name The name of the member
+     * @param value The value of the member, data of the correct type
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tenum_insert(int type, String name, byte value)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Tenum_insert inserts a new enumeration datatype member into a 16-bit enumeration datatype.
+     * 
+     * @param type Identifier of datatype.
+     * @param name The name of the member
+     * @param value The value of the member, data of the correct type
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tenum_insert(int type, String name, short value)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Tenum_insert inserts a new enumeration datatype member into a 32-bit enumeration datatype.
+     * 
+     * @param type Identifier of datatype.
+     * @param name The name of the member
+     * @param value The value of the member, data of the correct type
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tenum_insert(int type, String name, int value)
+            throws HDF5LibraryException, NullPointerException;
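+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): create
+    // a 32-bit enumeration type with two members, assuming the usual
+    // HDF5Constants.H5T_STD_I32LE constant as the integer base type.
+    private static int exampleEnumType() throws HDF5LibraryException
+    {
+        final int enumId = H5Tenum_create(HDF5Constants.H5T_STD_I32LE);
+        H5Tenum_insert(enumId, "RED", 0);
+        H5Tenum_insert(enumId, "GREEN", 1);
+        return enumId; // The caller releases it with H5Tclose().
+    }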
+
+    /**
+     * Converts the <var>value</var> (in place) to little endian.
+     * 
+     * @param value the array to convert in place
+     * @return a non-negative value if successful
+     */
+    public static native int H5Tconvert_to_little_endian(short[] value);
+
+    /**
+     * Converts the <var>value</var> (in place) to little endian.
+     * 
+     * @param value the array to convert in place
+     * @return a non-negative value if successful
+     */
+    public static native int H5Tconvert_to_little_endian(int[] value);
+
+    /**
+     * H5Tenum_nameof finds the symbol name that corresponds to the specified value of the
+     * enumeration datatype type.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @param value IN: The value of the member, data of the correct type
+     * @param name OUT: The name of the member
+     * @param size IN: The max length of the name
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tenum_nameof(int type, int[] value, String[] name, int size)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Tenum_valueof finds the value that corresponds to the specified name of the enumeration
+     * datatype type.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @param name IN: The name of the member
+     * @param value OUT: The value of the member
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Tenum_valueof(int type, String name, int[] value)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Tvlen_create creates a new variable-length (VL) datatype.
+     * 
+     * @param base_id IN: Identifier of parent datatype.
+     * @return a valid datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tvlen_create(int base_id) throws HDF5LibraryException;
+
+    /**
+     * H5Tset_tag tags an opaque datatype type_id with a unique ASCII identifier tag.
+     * 
+     * @param type IN: Identifier of parent datatype.
+     * @param tag IN: Name of the tag (will be stored as ASCII)
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tset_tag(int type, String tag) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_tag returns the tag associated with datatype type_id.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @return the tag
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native String H5Tget_tag(int type) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_super returns the type from which TYPE is derived.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @return the parent type
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Tget_super(int type) throws HDF5LibraryException;
+
+    /**
+     * H5Tget_member_value returns the value of the enumeration datatype member membno.
+     * 
+     * @param type_id IN: Identifier of datatype.
+     * @param membno IN: The index (0-based) of the member
+     * @param value OUT: The value of the member
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - value is null.
+     */
+    public static native int H5Tget_member_value(int type_id, int membno, int[] value)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * Array datatypes, new in HDF5 1.4.
+     */
+    /**
+     * Creates an array datatype object.
+     * 
+     * @param base_type_id Datatype identifier for the array base datatype.
+     * @param rank Rank of the array.
+     * @param dims Size of each array dimension.
+     * @return a valid datatype identifier if successful; otherwise returns a negative value.
+     * @exception HDF5LibraryException Error from the HDF5 Library.
+     * @exception NullPointerException rank is < 1 or dims is null.
+     */
+    public static native int H5Tarray_create(int base_type_id, int rank, int[] dims)
+            throws HDF5LibraryException, NullPointerException;
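+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): a 2x3
+    // array datatype of native ints, assuming the usual HDF5Constants.H5T_NATIVE_INT
+    // constant is available.
+    private static int exampleArrayType() throws HDF5LibraryException
+    {
+        final int[] dims = new int[] { 2, 3 };
+        return H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, dims);
+    }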
+
+    /**
+     * Returns the rank of an array datatype.
+     * 
+     * @param adtype_id Datatype identifier of array object.
+     * @return the rank of the array if successful; otherwise returns a negative value.
+     * @exception HDF5LibraryException Error from the HDF5 Library.
+     */
+    public static native int H5Tget_array_ndims(int adtype_id) throws HDF5LibraryException;
+
+    /**
+     * Returns sizes of array dimensions.
+     * 
+     * @param adtype_id IN: Datatype identifier of array object.
+     * @param dims OUT: Sizes of array dimensions.
+     * @return the non-negative number of dimensions of the array type if successful; otherwise
+     *         returns a negative value.
+     * @exception HDF5LibraryException Error from the HDF5 Library.
+     * @exception NullPointerException dims is null.
+     */
+    public static native int H5Tget_array_dims(int adtype_id, int[] dims)
+            throws HDF5LibraryException, NullPointerException;
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // New APIs for HDF5.1.6 //
+    // removed APIs: H5Pset_hyper_cache, H5Pget_hyper_cache //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * Returns the number of objects in the group specified by its identifier.
+     * 
+     * @param loc_id Identifier of the group or the file
+     * @param num_obj Number of objects in the group
+     * @return positive value if successful; otherwise returns a negative value.
+     * @throws HDF5LibraryException
+     * @throws NullPointerException
+     */
+    public static native int H5Gget_num_objs(int loc_id, long[] num_obj)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * Returns a name of an object specified by an index.
+     * 
+     * @param group_id Group or file identifier
+     * @param idx Transient index identifying object
+     * @param name the object name
+     * @param size Name length
+     * @return the size of the object name if successful, or 0 if no name is associated with the
+     *         group identifier. Otherwise returns a negative value
+     * @throws HDF5LibraryException
+     * @throws NullPointerException
+     */
+    public static native long H5Gget_objname_by_idx(int group_id, long idx, String[] name, long size)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * Returns the type of an object specified by an index.
+     * 
+     * @param group_id Group or file identifier.
+     * @param idx Transient index identifying object.
+     * @return Returns the type of the object if successful. Otherwise returns a negative value
+     * @throws HDF5LibraryException
+     * @throws NullPointerException
+     */
+    public static native int H5Gget_objtype_by_idx(int group_id, long idx)
+            throws HDF5LibraryException;
+
+    public static native long H5Gget_nlinks(int group_id) throws HDF5LibraryException;
+
+    public static native int H5Tget_native_type(int tid, int alloc_time)
+            throws HDF5LibraryException, NullPointerException;
+
+    //
+    // Backward compatibility:
+    // These functions have been replaced by new HDF5 library calls.
+    // The interface is preserved as a convenience to existing code.
+    //
+    /**
+     * H5Gn_members reports the number of objects in a Group. The 'objects' include everything that
+     * will be visited by H5Giterate. Each link is returned, so objects with multiple links will be
+     * counted once for each link.
+     * 
+     * @param loc_id file or group ID.
+     * @param name name of the group to iterate, relative to the loc_id
+     * @return the number of members in the group or -1 if error.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Gn_members(final int loc_id, final String name)
+            throws HDF5LibraryException, NullPointerException
+    {
+        final int grp_id = H5Gopen(loc_id, name, HDF5Constants.H5P_DEFAULT);
+        final long[] nobj = new long[1];
+        nobj[0] = -1;
+        H5Gget_num_objs(grp_id, nobj);
+        // Close the group again so that the identifier is not leaked.
+        H5Gclose(grp_id);
+        return (int) nobj[0];
+    }
+
+    /**
+     * H5Gget_obj_info_idx report the name and type of object with index 'idx' in a Group. The 'idx'
+     * corresponds to the index maintained by H5Giterate. Each link is returned, so objects with
+     * multiple links will be counted once for each link.
+     * 
+     * @param loc_id IN: file or group ID.
+     * @param name IN: name of the group to iterate, relative to the loc_id
+     * @param idx IN: the index of the object to iterate.
+     * @param oname the name of the object [OUT]
+     * @param type the type of the object [OUT]
+     * @return non-negative if successful, -1 if not.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Gget_obj_info_idx(final int loc_id, final String name, final int idx,
+            final String[] oname, final int[] type) throws HDF5LibraryException,
+            NullPointerException
+    {
+        final long default_buf_size = 4096;
+        final String n[] = new String[1];
+        n[0] = "";
+        final int grp_id = H5Gopen(loc_id, name, HDF5Constants.H5P_DEFAULT);
+        final long val = H5Gget_objname_by_idx(grp_id, idx, n, default_buf_size);
+        final int type_code = H5Gget_objtype_by_idx(grp_id, idx);
+        // Close the group again so that the identifier is not leaked.
+        H5Gclose(grp_id);
+        oname[0] = n[0];
+        type[0] = type_code;
+        return (int) val;
+    }
+
+    public static int H5Gget_obj_info_all(final int loc_id, final String name,
+            final String[] oname, final int[] type) throws HDF5LibraryException,
+            NullPointerException
+    {
+        if (oname == null)
+        {
+            throw new NullPointerException("H5Gget_obj_info_all(): name array is null");
+        }
+
+        if (type == null)
+        {
+            throw new NullPointerException("H5Gget_obj_info_all(): type array is null");
+        }
+
+        if (oname.length == 0)
+        {
+            throw new HDF5JavaException("H5Gget_obj_info_all(): array size is zero");
+        }
+
+        if (oname.length != type.length)
+        {
+            throw new HDF5JavaException(
+                    "H5Gget_obj_info_all(): name and type array sizes are different");
+        }
+
+        return H5Gget_obj_info_all(loc_id, name, oname, type, oname.length);
+    }
+
+    public static native int H5Gget_obj_info_all(int loc_id, String name, String[] oname,
+            int[] type, int n) throws HDF5LibraryException, NullPointerException;
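+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): list
+    // the names of all objects in a group by combining H5Gn_members with
+    // H5Gget_obj_info_all.
+    private static String[] exampleListGroup(final int locId, final String groupName)
+            throws HDF5LibraryException
+    {
+        final int n = H5Gn_members(locId, groupName);
+        if (n <= 0)
+        {
+            return new String[0];
+        }
+        final String[] names = new String[n];
+        final int[] types = new int[n];
+        H5Gget_obj_info_all(locId, groupName, names, types);
+        return names;
+    }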
+
+    //
+    // This function is deprecated. It is recommended to use the new
+    // library calls instead:
+    // H5Gget_objname_by_idx
+    // H5Gget_objtype_by_idx
+    //
+    /**
+     * H5Gget_objinfo returns information about the specified object.
+     * 
+     * @param loc_id IN: File, group, dataset, or datatype identifier.
+     * @param name IN: Name of the object for which status is being sought.
+     * @param follow_link IN: Link flag.
+     * @param fileno OUT: file id numbers.
+     * @param objno OUT: object id numbers.
+     * @param link_info OUT: link information.
+     * 
+     *            <pre>
+     *            link_info[0] = nlink
+     *            link_info[1] = type
+     *            link_info[2] = linklen
+     * </pre>
+     * @param mtime OUT: modification time
+     * @return a non-negative value if successful, with the fields of link_info and mtime (if
+     *         non-null) initialized.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name or array is null.
+     * @exception IllegalArgumentException - bad argument.
+     */
+    public static native int H5Gget_objinfo(int loc_id, String name, boolean follow_link,
+            long[] fileno, long[] objno, int[] link_info, long[] mtime)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+    /**
+     * H5Gget_objinfo returns information about the specified object in an HDF5GroupInfo object.
+     * 
+     * @param loc_id IN: File, group, dataset, or datatype identifier.
+     * @param name IN: Name of the object for which status is being sought.
+     * @param follow_link IN: Link flag.
+     * @param info OUT: the HDF5GroupInfo object to store the object information
+     * @return a non-negative value if successful, with the fields of HDF5GroupInfo object (if
+     *         non-null) initialized.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     * @see ch.systemsx.cisd.hdf5.hdf5lib.HDF5GroupInfo
+     * @see #H5Gget_objinfo(int, String, boolean, long[], long[], int[], long[])
+     */
+    public static int H5Gget_objinfo(final int loc_id, final String name,
+            final boolean follow_link, final HDF5GroupInfo info) throws HDF5LibraryException,
+            NullPointerException
+    {
+        int status = -1;
+        final long[] fileno = new long[2];
+        final long[] objno = new long[2];
+        final int[] link_info = new int[3];
+        final long[] mtime = new long[1];
+
+        status = H5Gget_objinfo(loc_id, name, follow_link, fileno, objno, link_info, mtime);
+
+        if (status >= 0)
+        {
+            info.setGroupInfo(fileno, objno, link_info[0], link_info[1], mtime[0], link_info[2]);
+        }
+        return status;
+    }
+
+    public static int H5Tget_native_type(final int tid) throws HDF5LibraryException,
+            NullPointerException
+    {
+        return H5Tget_native_type(tid, HDF5Constants.H5T_DIR_ASCEND);
+    }
+
+    public static native int H5Pset_alloc_time(int plist_id, int alloc_time)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_alloc_time(int plist_id, int[] alloc_time)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pset_fill_time(int plist_id, int fill_time)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_fill_time(int plist_id, int[] fill_time)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pfill_value_defined(int plist_id, int[] status)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pset_fletcher32(int plist) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pset_edc_check(int plist, int check) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pget_edc_check(int plist) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pset_shuffle(int plist_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pset_szip(int plist, int options_mask, int pixels_per_block)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Dget_space_status(int dset_id, int[] status)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native long H5Iget_name(int obj_id, String[] name, long size)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5set_free_list_limits(int reg_global_lim, int reg_list_lim,
+            int arr_global_lim, int arr_list_lim, int blk_global_lim, int blk_list_lim)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Fget_obj_ids(int file_id, int types, int max, int[] obj_id_list)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Fget_obj_count(int file_id, int types) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native boolean H5Tis_variable_str(int dtype_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Zfilter_avail(int filter) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Zunregister(int filter) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pmodify_filter(int plist, int filter, int flags, long cd_nelmts,
+            int[] cd_values) throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_filter_by_id(int plist_id, int filter, int[] flags,
+            long[] cd_nelmts, int[] cd_values, long namelen, String[] name)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native boolean H5Pall_filters_avail(int dcpl_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pset_hyper_vector_size(int dxpl_id, long vector_size)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_hyper_vector_size(int dxpl_id, long[] vector_size)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native boolean H5Tdetect_class(int dtype_id, int dtype_class)
+            throws HDF5LibraryException, NullPointerException;
+
+    // //////////////////////////////////////////////////////////////////
+    // //
+    // New APIs for reading data from the library //
+    // Using H5Dread(..., Object buf) requires function calls //
+    // theArray.emptyBytes() and theArray.arrayify(buf), which //
+    // triples the actual memory needed by the data set. //
+    // Using the following APIs solves the problem. //
+    // //
+    // //////////////////////////////////////////////////////////////////
+
+    public static native int H5Dread(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, short[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dread(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, int[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dread(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, long[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dread(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, float[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dread(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, double[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dread_string(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, String[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dread_reg_ref(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, String[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, short[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, int[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, long[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, float[] buf) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, double[] buf) throws HDF5LibraryException,
+            NullPointerException;
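+
+    // A minimal usage sketch (hypothetical helper, not part of the upstream API): read
+    // an entire integer dataset straight into an int[] via the typed H5Dread overload
+    // above, avoiding the Object-based path described in the banner. Assumes the usual
+    // HDF5Constants.H5T_NATIVE_INT, H5S_ALL and H5P_DEFAULT constants.
+    private static int[] exampleReadIntDataset(final int datasetId, final int length)
+            throws HDF5LibraryException
+    {
+        final int[] buf = new int[length];
+        H5Dread(datasetId, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+                HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf);
+        return buf;
+    }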
+
+    public static native int H5Pset_fclose_degree(int plist, int degree)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_fclose_degree(int plist_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    // //////////////////////////////////////////////////////////////////
+    // //
+    // New APIs from release 1.6.2 //
+    // August 20, 2004 //
+    // //////////////////////////////////////////////////////////////////
+
+    public static native int H5Iget_ref(int obj_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Iinc_ref(int obj_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Idec_ref(int obj_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pset_fapl_family(int fapl_id, long memb_size, int memb_fapl_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_fapl_family(int fapl_id, long[] memb_size, int[] memb_fapl_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pset_fapl_core(int fapl_id, int increment, boolean backing_store)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pget_fapl_core(int fapl_id, int[] increment, boolean[] backing_store)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native int H5Pset_family_offset(int fapl_id, long offset)
+            throws HDF5LibraryException, NullPointerException;
+
+    public static native long H5Pget_family_offset(int fapl_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    public static native int H5Pset_fapl_log(int fapl_id, String logfile, int flags, int buf_size)
+            throws HDF5LibraryException, NullPointerException;
+
+    // //////////////////////////////////////////////////////////////////
+    // //
+    // New APIs from release 1.6.3 //
+    // August 25, 2004 //
+    // //////////////////////////////////////////////////////////////////
+
+    public static native long H5Fget_name(int obj_id, String name, int size)
+            throws HDF5LibraryException;
+
+    public static native long H5Fget_filesize(int file_id) throws HDF5LibraryException;
+
+    public static native int H5Iget_file_id(int obj_id) throws HDF5LibraryException;
+
+    public static native int H5Premove_filter(int obj_id, int filter) throws HDF5LibraryException;
+
+    public static native int H5Zget_filter_info(int filter) throws HDF5LibraryException;
+
+    // ////////////////////////////////////////////////////////////////////////
+    // Modified by Peter Cao on July 26, 2006: //
+    // Some of the Generic Property APIs have callback function //
+    // pointers, which Java does not support. Only the Generic //
+    // Property APIs without function pointers are implemented //
+    // ////////////////////////////////////////////////////////////////////////
+
+    /**
+     * Creates a new property list of a given property list class
+     * 
+     * @param cls IN: Class of property list to create
+     * @return a valid property list identifier if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pcreate_list(int cls) throws HDF5LibraryException;
+
+    /**
+     * Sets a property list value (supports integer values only)
+     * 
+     * @param plid IN: Property list identifier to modify
+     * @param name IN: Name of property to modify
+     * @param value IN: value to set the property to
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pset(int plid, String name, int value) throws HDF5LibraryException;
+
+    /**
+     * H5Pexist determines whether a property exists within a property list or class
+     * 
+     * @param plid IN: Identifier for the property to query
+     * @param name IN: Name of property to check for
+     * @return a positive value if the property exists in the property object; zero if the property
+     *         does not exist; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pexist(int plid, String name) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_size retrieves the size of a property's value in bytes
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @param name IN: Name of property to query
+     * @return size of a property's value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native long H5Pget_size(int plid, String name) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_nprops retrieves the number of properties in a property list or class
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @return number of properties if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native long H5Pget_nprops(int plid) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_class_name retrieves the name of a generic property list class
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @return name of a property list if successful; null if failed
+     * @throws HDF5LibraryException
+     */
+    public static native String H5Pget_class_name(int plid) throws HDF5LibraryException;
+
+    /**
+     * H5Pget_class_parent retrieves an identifier for the parent class of a property class
+     * 
+     * @param plid IN: Identifier of the property class to query
+     * @return a valid parent class object identifier if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pget_class_parent(int plid) throws HDF5LibraryException;
+
+    /**
+     * H5Pisa_class checks to determine whether a property list is a member of the specified class
+     * 
+     * @param plist IN: Identifier of the property list
+     * @param pclass IN: Identifier of the property class
+     * @return a positive value if equal; zero if unequal; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pisa_class(int plist, int pclass) throws HDF5LibraryException;
+
+    /**
+     * H5Pget retrieves a copy of the value for a property in a property list (supports integers only)
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @param name IN: Name of property to query
+     * @return value for a property if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pget(int plid, String name) throws HDF5LibraryException;
+
+    /**
+     * H5Pequal determines if two property lists or classes are equal
+     * 
+     * @param plid1 IN: First property object to be compared
+     * @param plid2 IN: Second property object to be compared
+     * @return a positive value if equal; zero if unequal; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pequal(int plid1, int plid2) throws HDF5LibraryException;
+
+    /**
+     * H5Pcopy_prop copies a property from one property list or class to another
+     * 
+     * @param dst_id IN: Identifier of the destination property list or class
+     * @param src_id IN: Identifier of the source property list or class
+     * @param name IN: Name of the property to copy
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pcopy_prop(int dst_id, int src_id, String name)
+            throws HDF5LibraryException;
+
+    /**
+     * H5Premove removes a property from a property list
+     * 
+     * @param plid IN: Identifier of the property list to modify
+     * @param name IN: Name of property to remove
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Premove(int plid, String name) throws HDF5LibraryException;
+
+    /**
+     * H5Punregister removes a property from a property list class
+     * 
+     * @param plid IN: Property list class from which to remove permanent property
+     * @param name IN: Name of property to remove
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Punregister(int plid, String name) throws HDF5LibraryException;
+
+    /**
+     * Closes an existing property list class
+     * 
+     * @param plid IN: Property list class to close
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static native int H5Pclose_class(int plid) throws HDF5LibraryException;
+
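+    // Usage sketch (illustrative only; plid and the "nbits" property are hypothetical):
+    // a round trip through the generic property APIs above for an integer-valued property:
+    //
+    //     if (H5Pexist(plid, "nbits") > 0)
+    //     {
+    //         H5Pset(plid, "nbits", 16);
+    //         int nbits = H5Pget(plid, "nbits");     // 16
+    //     }
+    //     long nprops = H5Pget_nprops(plid);
+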
+    // //////////////////////////////////////////////////////////////////
+    // //
+    // New APIs from release 1.8.0 //
+    // January 21, 2008 //
+    // //////////////////////////////////////////////////////////////////
+
+    /**
+     * Sets the permissible bounds of the library's file format versions.
+     * <p>
+     * Can be set on the file access property list.
+     * <p>
+     * As of 1.8.0, only the combinations <code>low=H5F_LIBVER_EARLIEST</code> / <code>
+     * high=H5F_LIBVER_LATEST</code> (which is the default and means that 1.6-compatible files are
+     * created if no features are used that require a 1.8 format) and <code>low=H5F_LIBVER_LATEST
+     * </code> / <code>high=H5F_LIBVER_LATEST</code> (which means that 1.8 files, unreadable by an
+     * earlier library, are always created) are allowed.
+     * 
+     * @param plist_id Property list identifier.
+     * @param low The lower permissible bound. One of <code>H5F_LIBVER_EARLIEST</code> or <code>
+     *            H5F_LIBVER_LATEST</code>.
+     * @param high The higher permissible bound. Must be <code>H5F_LIBVER_LATEST</code>.
+     * @return a non-negative value if successful
+     */
+    public static native int H5Pset_libver_bounds(int plist_id, int low, int high)
+            throws HDF5LibraryException;
+
+    /**
+     * Returns the permissible bounds of the library's file format versions.
+     * 
+     * @param plist_id Property list identifier.
+     * @return an array containing <code>[low, high]</code> on success
+     */
+    public static native int[] H5Pget_libver_bounds(int plist_id) throws HDF5LibraryException;
+
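+    // Usage sketch (illustrative only, hypothetical fapl handle): forcing 1.8 format files
+    // and reading the bounds back:
+    //
+    //     H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
+    //             HDF5Constants.H5F_LIBVER_LATEST);
+    //     int[] bounds = H5Pget_libver_bounds(fapl); // bounds[0] == low, bounds[1] == high
+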
+    /**
+     * Sets the local heap size hint for an old-style group. This is the chunk size allocated on the
+     * heap for a group.
+     * 
+     * @param gcpl_id The group creation property list to change the heap size hint for
+     * @param size_hint The size hint to set.
+     * @return a non-negative value if successful
+     */
+    public static native int H5Pset_local_heap_size_hint(int gcpl_id, int size_hint);
+
+    /**
+     * Returns the local heap size hint for an old-style group. This is the chunk size allocated on
+     * the heap for a group.
+     * 
+     * @param gcpl_id The group creation property list to read the heap size hint from
+     * @return The size hint of the group if successful
+     */
+    public static native int H5Pget_local_heap_size_hint(int gcpl_id);
+
+    /**
+     * Sets the phase change parameters for a new-style group.
+     * 
+     * @param gcpl_id The group creation property list to set the link phase changes for
+     * @param max_compact The maximum number of links in a group to store as header messages
+     * @param min_dense The minimum number of links in a group to store in the dense format
+     * @return a non-negative value if successful
+     */
+    public static native int H5Pset_link_phase_change(int gcpl_id, int max_compact, int min_dense);
+
+    /**
+     * Returns the phase change parameters for a new-style group.
+     * 
+     * @param gcpl_id The group creation property list to read the link phase changes from
+     * @return the phase change parameters as array [max_compact, min_dense] if successful
+     */
+    public static native int[] H5Pget_link_phase_change(int gcpl_id);
+
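+    // Usage sketch (illustrative only, hypothetical gcpl handle): keep up to 8 links in
+    // compact storage and fall back from dense storage below 6 links:
+    //
+    //     H5Pset_link_phase_change(gcpl, 8, 6);
+    //     int[] phaseChange = H5Pget_link_phase_change(gcpl);
+    //     // phaseChange[0] == max_compact, phaseChange[1] == min_dense
+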
+    /**
+     * Sets the character encoding for the given creation property list to the given encoding.
+     * 
+     * @param cpl_id The creation property list to set the character encoding for.
+     * @param encoding The encoding (one of {@link HDF5Constants#H5T_CSET_ASCII} or
+     *            {@link HDF5Constants#H5T_CSET_UTF8}) to use.
+     * @return a non-negative value if successful
+     */
+    public static native int H5Pset_char_encoding(int cpl_id, int encoding);
+
+    /**
+     * Returns the character encoding currently set for a creation property list.
+     * 
+     * @param cpl_id The creation property list to get the character encoding for.
+     * @return The encoding, one of {@link HDF5Constants#H5T_CSET_ASCII} or
+     *         {@link HDF5Constants#H5T_CSET_UTF8}.
+     */
+    public static native int H5Pget_char_encoding(int cpl_id);
+
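+    // Usage sketch (illustrative only, hypothetical acpl handle): storing names as UTF-8:
+    //
+    //     H5Pset_char_encoding(acpl, HDF5Constants.H5T_CSET_UTF8);
+    //     boolean utf8 = (H5Pget_char_encoding(acpl) == HDF5Constants.H5T_CSET_UTF8);
+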
+    /**
+     * H5Oopen opens an existing object with the specified name at the specified location, loc_id.
+     * 
+     * @param loc_id File or group identifier within which the object is to be opened.
+     * @param name Name of object to open.
+     * @param access_plist_id Object access property list identifier (H5P_DEFAULT for the default
+     *            property list).
+     * @return a valid object identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static native int H5Oopen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Oclose releases resources used by an object which was opened by a call to H5Oopen().
+     * 
+     * @param loc_id Object identifier to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Oclose(int loc_id) throws HDF5LibraryException;
+
+    /**
+     * H5Ocopy copies an existing object with the specified src_name at the specified location,
+     * src_loc_id, to the specified dst_name at the specified destination location, dst_loc_id.
+     * 
+     * @param src_loc_id Source file or group identifier within which the object to copy resides.
+     * @param src_name Name of the source object to copy.
+     * @param dst_loc_id Destination file or group identifier in which the copy is to be created.
+     * @param dst_name Name of the destination object to create.
+     * @param object_copy_plist Object copy property list identifier (H5P_DEFAULT for the default
+     *            property list).
+     * @param link_creation_plist Link creation property list identifier for the new hard link
+     *            (H5P_DEFAULT for the default property list).
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - src_name or dst_name is null.
+     */
+    public static native int H5Ocopy(int src_loc_id, String src_name, int dst_loc_id,
+            String dst_name, int object_copy_plist, int link_creation_plist)
+            throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Oget_info_by_name returns information about the object. This method follows soft links and
+     * returns information about the link target, rather than the link.
+     * <p>
+     * If not <code>null</code>, <var>info</var> needs to be an array of length 5 and will return
+     * the following information in each index:
+     * <ul>
+     * <li>0: file number that the object is in</li>
+     * <li>1: address of the object in the file</li>
+     * <li>2: reference count of the object (will be {@code > 1} if more than one hard link exists
+     * to the object)</li>
+     * <li>3: creation time of the object (in seconds since start of the epoch)</li>
+     * <li>4: number of attributes that this object has</li>
+     * </ul>
+     * 
+     * @param loc_id File or group identifier within which object is to be open.
+     * @param object_name Name of object to get info for.
+     * @param infoOrNull If not <code>null</code>, it will return additional information about this
+     *            object. Needs to be either <code>null</code> or an array of length 5.
+     * @param exception_when_non_existent If <code>true</code>, an HDF5LibraryException will be
+     *            thrown when the object does not exist, otherwise -1 will be returned.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static native int H5Oget_info_by_name(int loc_id, String object_name, long[] infoOrNull,
+            boolean exception_when_non_existent) throws HDF5LibraryException;
+
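+    // Usage sketch (illustrative only, hypothetical file_id): reading the info array
+    // documented above:
+    //
+    //     long[] info = new long[5];
+    //     H5Oget_info_by_name(file_id, "/some/object", info, true);
+    //     long referenceCount = info[2];
+    //     long numberOfAttributes = info[4];
+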
+    /**
+     * H5Lcreate_hard creates a hard link for an already existing object.
+     * 
+     * @param obj_loc_id File, group, dataset, or datatype identifier of the existing object
+     * @param obj_name A name of the existing object
+     * @param link_loc_id Location identifier of the link to create
+     * @param link_name Name of the link to create
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - obj_name or link_name is null.
+     */
+    public static native int H5Lcreate_hard(int obj_loc_id, String obj_name, int link_loc_id,
+            String link_name, int lcpl_id, int lapl_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Lcreate_soft creates a soft link to some target path.
+     * 
+     * @param target_path The path of the link target
+     * @param link_loc_id Location identifier of the link to create
+     * @param link_name Name of the link to create
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - target_path or link_name is null.
+     */
+    public static native int H5Lcreate_soft(String target_path, int link_loc_id, String link_name,
+            int lcpl_id, int lapl_id) throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Lcreate_external creates an external link to some object in another file.
+     * 
+     * @param file_name File name of the link target
+     * @param obj_name Object name of the link target
+     * @param link_loc_id Location identifier of the link to create
+     * @param link_name Name of the link to create
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - file_name, obj_name or link_name is null.
+     */
+    public static native int H5Lcreate_external(String file_name, String obj_name, int link_loc_id,
+            String link_name, int lcpl_id, int lapl_id) throws HDF5LibraryException,
+            NullPointerException;
+
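+    // Usage sketch (illustrative only; file_id and the paths are hypothetical): creating a
+    // hard, a soft and an external link to the same target:
+    //
+    //     int dflt = HDF5Constants.H5P_DEFAULT;
+    //     H5Lcreate_hard(file_id, "/data/a", file_id, "/hard/a", dflt, dflt);
+    //     H5Lcreate_soft("/data/a", file_id, "/soft/a", dflt, dflt);
+    //     H5Lcreate_external("other.h5", "/data/a", file_id, "/ext/a", dflt, dflt);
+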
+    /**
+     * H5Lmove moves a link atomically to a new group or renames it.
+     * 
+     * @param src_loc_id The old location identifier of the object to be renamed
+     * @param src_name The old name of the object to be renamed
+     * @param dst_loc_id The new location identifier of the link
+     * @param dst_name The new name of the object
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - src_name or dst_name is null.
+     */
+    public static native int H5Lmove(int src_loc_id, String src_name, int dst_loc_id,
+            String dst_name, int lcpl_id, int lapl_id) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Lexists returns <code>true</code> if a link with <var>name</var> exists and <code>false
+     * </code> otherwise.
+     * <p>
+     * <i>Note:</i> The Java wrapper differs from the low-level C routine in that it will return
+     * <code>false</code> if <var>name</var> is a path that contains groups which don't exist (the C
+     * routine will give you an <code>H5E_NOTFOUND</code> in this case).
+     */
+    public static native boolean H5Lexists(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException;
+
+    /**
+     * H5Lget_link_info returns the type of the link. If <code>lname != null</code> and
+     * <var>name</var> is a symbolic link, <code>lname[0]</code> will contain the target of the
+     * link. If <var>exception_when_non_existent</var> is <code>true</code>, the method will throw
+     * an exception when the link does not exist, otherwise -1 will be returned.
+     */
+    public static native int H5Lget_link_info(int loc_id, String name, String[] lname,
+            boolean exception_when_non_existent) throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Lget_link_info_all returns the names, types and link targets of all links in group
+     * <var>name</var>.
+     */
+    public static int H5Lget_link_info_all(final int loc_id, final String name,
+            final String[] oname, final int[] type, final String[] lname)
+            throws HDF5LibraryException, NullPointerException
+    {
+        if (oname == null)
+        {
+            throw new NullPointerException("H5Lget_obj_info_all(): name array is null");
+        }
+
+        if (type == null)
+        {
+            throw new NullPointerException("H5Lget_obj_info_all(): type array is null");
+        }
+
+        if (oname.length != type.length)
+        {
+            throw new HDF5JavaException(
+                    "H5Lget_obj_info_all(): oname and type array sizes are different");
+        }
+        if (lname != null && oname.length != lname.length)
+        {
+            throw new HDF5JavaException(
+                    "H5Lget_obj_info_all(): oname and lname array sizes are different");
+        }
+
+        return H5Lget_link_info_all(loc_id, name, oname, type, lname, oname.length);
+    }
+
+    public static native int H5Lget_link_info_all(int loc_id, String name, String[] oname,
+            int[] type, String[] lname, int n) throws HDF5LibraryException, NullPointerException;
+
+    /**
+     * H5Lget_link_names_all returns the names of all links in group <var>name</var>.
+     */
+    public static int H5Lget_link_names_all(final int loc_id, final String name,
+            final String[] oname) throws HDF5LibraryException, NullPointerException
+    {
+        if (oname == null)
+        {
+            throw new NullPointerException("H5Lget_link_names_all(): oname array is null");
+        }
+
+        return H5Lget_link_names_all(loc_id, name, oname, oname.length);
+    }
+
+    public static native int H5Lget_link_names_all(int loc_id, String name, String[] oname, int n)
+            throws HDF5LibraryException, NullPointerException;
+
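+    // Usage sketch (illustrative only; group_id and numberOfLinks are hypothetical, with
+    // numberOfLinks determined beforehand by the caller):
+    //
+    //     String[] names = new String[numberOfLinks];
+    //     H5Lget_link_names_all(group_id, ".", names);
+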
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5A.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5A.java
new file mode 100644
index 0000000..f2eaa09
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5A.java
@@ -0,0 +1,494 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 attribute functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ *
+ * @author Bernd Rinn
+ */
+public class H5A
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /**
+     * H5Aexists returns <code>true</code> if an attribute with <var>name</var> exists for the
+     * object defined by <var>obj_id</var> and <code>false</code> otherwise.
+     */
+    public static boolean H5Aexists(int obj_id, String name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aexists(obj_id, name);
+        }
+    }
+
+    /**
+     * H5Acreate creates an attribute which is attached to the object specified with loc_id.
+     * 
+     * @param loc_id IN: Object (dataset, group, or named datatype) to be attached to.
+     * @param name IN: Name of attribute to create.
+     * @param type_id IN: Identifier of datatype for attribute.
+     * @param space_id IN: Identifier of dataspace for attribute.
+     * @param create_plist_id IN: Identifier of creation property list (currently not used).
+     * @param access_plist_id IN: Attribute access property list identifier (currently not used).
+     * @return an attribute identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Acreate(int loc_id, String name, int type_id, int space_id,
+            int create_plist_id, int access_plist_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Acreate(loc_id, name, type_id, space_id, create_plist_id, access_plist_id);
+        }
+    }
+
+    /**
+     * H5Aopen_name opens an attribute specified by its name, name, which is attached to the object
+     * specified with loc_id.
+     * 
+     * @param loc_id IN: Identifier of the group, dataset, or named datatype that the attribute is
+     *            attached to.
+     * @param name IN: Attribute name.
+     * @return attribute identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Aopen_name(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aopen_name(loc_id, name);
+        }
+    }
+
+    /**
+     * H5Aopen_idx opens an attribute which is attached to the object specified with loc_id. The
+     * location object may be either a group, dataset, or named datatype, all of which may have any
+     * sort of attribute.
+     * 
+     * @param loc_id IN: Identifier of the group, dataset, or named datatype attribute
+     * @param idx IN: Index of the attribute to open.
+     * @return attribute identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Aopen_idx(int loc_id, int idx) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aopen_idx(loc_id, idx);
+        }
+    }
+
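+    // Usage sketch (illustrative only; all ids are hypothetical): creating, writing and
+    // closing an integer attribute using the methods of this class:
+    //
+    //     int attr = H5Acreate(dataset_id, "version", type_id, space_id,
+    //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    //     H5Awrite(attr, mem_type_id, new int[] { 1 });
+    //     H5Aclose(attr);
+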
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static int H5Awrite(int attr_id, int mem_type_id, byte[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Awrite(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static int H5Awrite(int attr_id, int mem_type_id, short[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Awrite(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static int H5Awrite(int attr_id, int mem_type_id, int[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Awrite(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static int H5Awrite(int attr_id, int mem_type_id, long[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Awrite(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static int H5Awrite(int attr_id, int mem_type_id, float[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Awrite(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is written from buf to the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to write.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf IN: Data to be written.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data is null.
+     */
+    public static int H5Awrite(int attr_id, int mem_type_id, double[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Awrite(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5AwriteString writes a variable length String attribute, specified by its identifier
+     * attr_id, from the application memory buffer buf into the file.
+     * 
+     * @param attr_id Identifier of the attribute to write.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param buf Buffer with data to be written to the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static int H5AwriteString(int attr_id, int mem_type_id, String[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5AwriteString(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Acopy copies the content of one attribute to another.
+     * 
+     * @param src_aid the identifier of the source attribute
+     * @param dst_aid the identifier of the destination attribute
+     */
+    public static int H5Acopy(int src_aid, int dst_aid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Acopy(src_aid, dst_aid);
+        }
+    }
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf OUT: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Aread(int attr_id, int mem_type_id, byte[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aread(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf OUT: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Aread(int attr_id, int mem_type_id, short[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aread(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf OUT: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Aread(int attr_id, int mem_type_id, int[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aread(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf OUT: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Aread(int attr_id, int mem_type_id, long[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aread(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf OUT: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Aread(int attr_id, int mem_type_id, float[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aread(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is
+     * specified with mem_type_id. The entire attribute is read into buf from the file.
+     * 
+     * @param attr_id IN: Identifier of an attribute to read.
+     * @param mem_type_id IN: Identifier of the attribute datatype (in memory).
+     * @param buf OUT: Buffer for data to be read.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Aread(int attr_id, int mem_type_id, double[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aread(attr_id, mem_type_id, buf);
+        }
+    }
+
+    public static int H5AreadVL(int attr_id, int mem_type_id, String[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5AreadVL(attr_id, mem_type_id, buf);
+        }
+    }
+
+    /**
+     * H5Aget_space retrieves a copy of the dataspace for an attribute.
+     * 
+     * @param attr_id IN: Identifier of an attribute.
+     * @return attribute dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Aget_space(int attr_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aget_space(attr_id);
+        }
+    }
+
+    /**
+     * H5Aget_type retrieves a copy of the datatype for an attribute.
+     * 
+     * @param attr_id IN: Identifier of an attribute.
+     * @return a datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Aget_type(int attr_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aget_type(attr_id);
+        }
+    }
+
+    /**
+     * H5Aget_name retrieves the name of an attribute specified by the identifier, attr_id.
+     * 
+     * @param attr_id IN: Identifier of the attribute.
+     * @param buf_size IN: The size of the buffer to store the name in.
+     * @param name OUT: Buffer to store name in.
+     * @exception ArrayIndexOutOfBoundsException JNI error writing back array
+     * @exception ArrayStoreException JNI error writing back array
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     * @exception IllegalArgumentException - buf_size <= 0.
+     * @return the length of the attribute's name if successful.
+     */
+    public static long H5Aget_name(int attr_id, long buf_size, String[] name)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aget_name(attr_id, buf_size, name);
+        }
+    }
+
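+    // Usage sketch (illustrative only, hypothetical attr_id): the name is returned in a
+    // one-element String array and the return value is the length of the name:
+    //
+    //     String[] nameOut = new String[1];
+    //     long nameLength = H5Aget_name(attr_id, 256, nameOut);
+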
+    /**
+     * H5Aget_num_attrs returns the number of attributes attached to the object specified by its
+     * identifier, loc_id.
+     * 
+     * @param loc_id IN: Identifier of a group, dataset, or named datatype.
+     * @return the number of attributes if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Aget_num_attrs(int loc_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aget_num_attrs(loc_id);
+        }
+    }
+
+    /**
+     * H5Adelete removes the attribute specified by its name, name, from a dataset, group, or named
+     * datatype.
+     * 
+     * @param loc_id IN: Identifier of the dataset, group, or named datatype.
+     * @param name IN: Name of the attribute to delete.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Adelete(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Adelete(loc_id, name);
+        }
+    }
+
+    /**
+     * H5Aclose terminates access to the attribute specified by its identifier, attr_id.
+     * 
+     * @param attr_id IN: Attribute to release access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Aclose(int attr_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Aclose(attr_id);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5D.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5D.java
new file mode 100644
index 0000000..d71f150
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5D.java
@@ -0,0 +1,503 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 dataset functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5D
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /**
+     * H5Dcreate creates a data set with a name, name, in the file or in the group specified by the
+     * identifier loc_id.
+     * 
+     * @param loc_id Identifier of the file or group to create the dataset within.
+     * @param name The name of the dataset to create.
+     * @param type_id Identifier of the datatype to use when creating the dataset.
+     * @param space_id Identifier of the dataspace to use when creating the dataset.
+     * @param link_create_plist_id Identifier of the link creation property list.
+     * @param dset_create_plist_id Identifier of the dataset creation property list.
+     * @param dset_access_plist_id Identifier of the dataset access property list.
+     * @return a dataset identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Dcreate(int loc_id, String name, int type_id, int space_id,
+            int link_create_plist_id, int dset_create_plist_id, int dset_access_plist_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dcreate(loc_id, name, type_id, space_id, link_create_plist_id,
+                    dset_create_plist_id, dset_access_plist_id);
+
+        }
+    }
+
+    /**
+     * H5Dopen opens an existing dataset for access in the file or group specified in loc_id.
+     * 
+     * @param loc_id Identifier of the file or group to open the dataset within.
+     * @param name The name of the dataset to access.
+     * @param access_plist_id Dataset access property list.
+     * @return a dataset identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Dopen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dopen(loc_id, name, access_plist_id);
+        }
+    }
+
+    public static int H5Dchdir_ext(String dir_name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dchdir_ext(dir_name);
+        }
+    }
+
+    public static int H5Dgetdir_ext(String[] dir_name, int size) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dgetdir_ext(dir_name, size);
+        }
+    }
+
+    /**
+     * H5Dget_space returns an identifier for a copy of the dataspace for a dataset.
+     * 
+     * @param dataset_id Identifier of the dataset to query.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Dget_space(int dataset_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dget_space(dataset_id);
+        }
+    }
+
+    /**
+     * H5Dget_type returns an identifier for a copy of the datatype for a dataset.
+     * 
+     * @param dataset_id Identifier of the dataset to query.
+     * @return a datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Dget_type(int dataset_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dget_type(dataset_id);
+        }
+    }
+
+    /**
+     * H5Dget_create_plist returns an identifier for a copy of the dataset creation property list
+     * for a dataset.
+     * 
+     * @param dataset_id Identifier of the dataset to query.
+     * @return a dataset creation property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Dget_create_plist(int dataset_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dget_create_plist(dataset_id);
+        }
+    }
+
+    /**
+     * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into
+     * the application memory buffer buf.
+     * 
+     * @param dataset_id Identifier of the dataset read from.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param buf Buffer to store data read from the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - data buffer is null.
+     */
+    public static int H5Dread(int dataset_id, int mem_type_id, int mem_space_id, int file_space_id,
+            int xfer_plist_id, byte[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
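+    // Usage sketch (illustrative only; ids and the pre-sized buf are hypothetical): reading
+    // a complete dataset into a Java byte array:
+    //
+    //     int dataset_id = H5Dopen(file_id, "/mydata", HDF5Constants.H5P_DEFAULT);
+    //     H5Dread(dataset_id, mem_type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+    //             HDF5Constants.H5P_DEFAULT, buf);
+    //     H5Dclose(dataset_id);
+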
+    public static int H5DreadVL(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, Object[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5DreadVL(dataset_id, mem_type_id, mem_space_id, file_space_id,
+                    xfer_plist_id, buf);
+        }
+    }
+
+    /**
+     * H5DwriteString writes a (partial) variable length String dataset, specified by its identifier
+     * dataset_id, from the application memory buffer buf into the file.
+     * <p>
+     * <i>contributed by Rosetta Biosoftware.</i>
+     * 
+     * @param dataset_id Identifier of the dataset to write to.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param buf Buffer with data to be written to the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static int H5DwriteString(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, String[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5DwriteString(dataset_id, mem_type_id, mem_space_id, file_space_id,
+                    xfer_plist_id, buf);
+        }
+    }
+
+    /**
+     * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the
+     * application memory buffer buf into the file.
+     * 
+     * @param dataset_id Identifier of the dataset to write to.
+     * @param mem_type_id Identifier of the memory datatype.
+     * @param mem_space_id Identifier of the memory dataspace.
+     * @param file_space_id Identifier of the dataset's dataspace in the file.
+     * @param xfer_plist_id Identifier of a transfer property list for this I/O operation.
+     * @param buf Buffer with data to be written to the file.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, byte[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    /**
+     * H5Dextend ensures that the dataset is at least of size <var>size</var>.
+     * 
+     * @param dataset_id Identifier of the dataset.
+     * @param size Array containing the new magnitude of each dimension.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size array is null.
+     */
+    public static int H5Dextend(int dataset_id, byte[] size) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dextend(dataset_id, size);
+        }
+    }
+
+    public static int H5Dextend(final int dataset_id, final long[] size) throws HDF5Exception,
+            NullPointerException
+    {
+        final byte[] buf = HDFNativeData.longToByte(size);
+
+        return H5Dextend(dataset_id, buf);
+    }
+
+    /**
+     * H5Dset_extent sets the size of the dataset to <var>size</var>. Make sure that no important
+     * data are lost, since this method will not check that the data dimensions are not larger
+     * than <var>size</var>.
+     * 
+     * @param dataset_id Identifier of the dataset.
+     * @param size Array containing the new magnitude of each dimension.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size array is null.
+     */
+    public static int H5Dset_extent(int dataset_id, byte[] size) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dset_extent(dataset_id, size);
+        }
+    }
+
+    public static int H5Dset_extent(final int dataset_id, final long[] size) throws HDF5Exception,
+            NullPointerException
+    {
+        final byte[] buf = HDFNativeData.longToByte(size);
+
+        return H5Dset_extent(dataset_id, buf);
+    }
+
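+    // Usage sketch (illustrative only, hypothetical dataset_id): growing a 2-dimensional
+    // dataset to 1000 x 20 elements using the long[] convenience overload above:
+    //
+    //     H5Dset_extent(dataset_id, new long[] { 1000, 20 });
+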
+    /**
+     * H5Dclose ends access to a dataset specified by dataset_id and releases resources used by it.
+     * 
+     * @param dataset_id Identifier of the dataset to finish access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Dclose(int dataset_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dclose(dataset_id);
+        }
+    }
+
+    /**
+     * H5Dget_storage_size returns the amount of storage that is required for the dataset.
+     * 
+     * @param dataset_id Identifier of the dataset in question
+     * @return the amount of storage space allocated for the dataset.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Dget_storage_size(int dataset_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dget_storage_size(dataset_id);
+        }
+    }
+
+    /**
+     * H5Dcopy copies the content of one dataset to another dataset.
+     * 
+     * @param src_did the identifier of the source dataset
+     * @param dst_did the identifier of the destination dataset
+     */
+    public static int H5Dcopy(int src_did, int dst_did) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dcopy(src_did, dst_did);
+        }
+    }
+
+    /**
+     * H5Dvlen_get_buf_size determines the number of bytes required to store the variable-length
+     * data of the dataset, for the given datatype and dataspace.
+     */
+    public static int H5Dvlen_get_buf_size(int dataset_id, int type_id, int space_id, int[] size)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dvlen_get_buf_size(dataset_id, type_id, space_id, size);
+        }
+    }
+
+    /**
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static int H5Dvlen_reclaim(int type_id, int space_id, int xfer_plist_id, byte[] buf)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dvlen_reclaim(type_id, space_id, xfer_plist_id, buf);
+        }
+    }
+
+    public static int H5Dget_space_status(int dset_id, int[] status) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dget_space_status(dset_id, status);
+        }
+    }
+
+    // //////////////////////////////////////////////////////////////////
+    // //
+    // New APIs for reading data from the library //
+    // Using H5Dread(..., Object buf) requires function calls //
+    // theArray.emptyBytes() and theArray.arrayify(buf), which //
+    // triples the actual memory needed by the data set. //
+    // Using the following APIs avoids this problem. //
+    // //
+    // //////////////////////////////////////////////////////////////////
+
+    public static int H5Dread(int dataset_id, int mem_type_id, int mem_space_id, int file_space_id,
+            int xfer_plist_id, short[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dread(int dataset_id, int mem_type_id, int mem_space_id, int file_space_id,
+            int xfer_plist_id, int[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dread(int dataset_id, int mem_type_id, int mem_space_id, int file_space_id,
+            int xfer_plist_id, long[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dread(int dataset_id, int mem_type_id, int mem_space_id, int file_space_id,
+            int xfer_plist_id, float[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dread(int dataset_id, int mem_type_id, int mem_space_id, int file_space_id,
+            int xfer_plist_id, double[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dread_string(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, String[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread_string(dataset_id, mem_type_id, mem_space_id, file_space_id,
+                    xfer_plist_id, buf);
+        }
+    }
+
+    public static int H5Dread_reg_ref(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, String[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dread_reg_ref(dataset_id, mem_type_id, mem_space_id, file_space_id,
+                    xfer_plist_id, buf);
+        }
+    }
+
+    public static int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, short[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, int[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, long[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, float[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+    public static int H5Dwrite(int dataset_id, int mem_type_id, int mem_space_id,
+            int file_space_id, int xfer_plist_id, double[] buf) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+                    buf);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5F.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5F.java
new file mode 100644
index 0000000..6f66abe
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5F.java
@@ -0,0 +1,266 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 file functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5F
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /**
+     * H5Fopen opens an existing file and is the primary function for accessing existing HDF5 files.
+     * 
+     * @param name Name of the file to access.
+     * @param flags File access flags.
+     * @param access_id Identifier for the file access properties list.
+     * @return a file identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Fopen(String name, int flags, int access_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fopen(name, flags, access_id);
+        }
+    }
+
+    /**
+     * H5Fcreate is the primary function for creating HDF5 files.
+     * 
+     * @param name Name of the file to access.
+     * @param flags File access flags. Possible values include:
+     *            <UL>
+     *            <LI>H5F_ACC_RDWR Allow read and write access to file.</LI>
+     *            <LI>H5F_ACC_RDONLY Allow read-only access to file.</LI>
+     *            <LI>H5F_ACC_TRUNC Truncate file, if it already exists, erasing all data previously
+     *            stored in the file.</LI>
+     *            <LI>H5F_ACC_EXCL Fail if file already exists.</LI>
+     *            <LI>H5F_ACC_DEBUG Print debug information.</LI>
+     *            <LI>H5P_DEFAULT Apply default file access and creation properties.</LI>
+     *            </UL>
+     * @param create_id File creation property list identifier, used when modifying default file
+     *            meta-data. Use H5P_DEFAULT for default access properties.
+     * @param access_id File access property list identifier. If parallel file access is desired,
+     *            this is a collective call according to the communicator stored in the access_id
+     *            (not supported in Java). Use H5P_DEFAULT for default access properties.
+     * @return a file identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Fcreate(String name, int flags, int create_id, int access_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fcreate(name, flags, create_id, access_id);
+        }
+    }
+
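+    // Usage sketch (illustrative; the file name is a placeholder and the constants
+    // are those of ncsa.hdf.hdf5lib.HDF5Constants): create a new file, truncating
+    // any existing one, and make sure it is closed again.
+    //
+    //     final int fileId = H5Fcreate("example.h5", HDF5Constants.H5F_ACC_TRUNC,
+    //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    //     try
+    //     {
+    //         // ... work with the file ...
+    //     } finally
+    //     {
+    //         H5Fclose(fileId);
+    //     }
+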
+    /**
+     * H5Fflush causes all buffers associated with a file or object to be immediately flushed
+     * (written) to disk without removing the data from the (memory) cache.
+     * <P>
+     * After this call completes, the file (or object) is in a consistent state and all data written
+     * to date is assured to be permanent.
+     * 
+     * @param object_id Identifier of object used to identify the file. <b>object_id</b> can be any
+     *            object associated with the file, including the file itself, a dataset, a group, an
+     *            attribute, or a named data type.
+     * @param scope specifies the scope of the flushing action, in the case that the HDF-5 file is
+     *            not a single physical file.
+     *            <P>
+     *            Valid values are:
+     *            <UL>
+     *            <LI>H5F_SCOPE_GLOBAL Flushes the entire virtual file.</LI>
+     *            <LI>H5F_SCOPE_LOCAL Flushes only the specified file.</LI>
+     *            </UL>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Fflush(int object_id, int scope) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fflush(object_id, scope);
+        }
+    }
+
+    /**
+     * H5Fis_hdf5 determines whether a file is in the HDF5 format.
+     * 
+     * @param name File name to check format.
+     * @return true if the file is in the HDF5 format, false if not.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static boolean H5Fis_hdf5(String name) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fis_hdf5(name);
+        }
+    }
+
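+    // Usage sketch (illustrative; the file name is a placeholder): check the
+    // format before attempting to open the file read-only.
+    //
+    //     if (H5Fis_hdf5("example.h5"))
+    //     {
+    //         final int fileId = H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY,
+    //                 HDF5Constants.H5P_DEFAULT);
+    //         // ...
+    //     }
+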
+    /**
+     * H5Fget_create_plist returns a file creation property list identifier identifying the creation
+     * properties used to create this file.
+     * 
+     * @param file_id Identifier of the file to get creation property list
+     * @return a file creation property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Fget_create_plist(int file_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fget_create_plist(file_id);
+        }
+    }
+
+    /**
+     * H5Fget_access_plist returns the file access property list identifier of the specified file.
+     * 
+     * @param file_id Identifier of file to get access property list of
+     * @return a file access property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Fget_access_plist(int file_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fget_access_plist(file_id);
+        }
+    }
+
+    /**
+     * H5Fclose terminates access to an HDF5 file.
+     * 
+     * @param file_id Identifier of a file to terminate access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Fclose(int file_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fclose(file_id);
+        }
+    }
+
+    /**
+     * H5Fmount mounts the file specified by child_id onto the group specified by loc_id and name
+     * using the mount properties plist_id.
+     * 
+     * @param loc_id The identifier for the group onto which the file specified by child_id is to be
+     *            mounted.
+     * @param name The name of the group onto which the file specified by child_id is to be mounted.
+     * @param child_id The identifier of the file to be mounted.
+     * @param plist_id The identifier of the property list to be used.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Fmount(int loc_id, String name, int child_id, int plist_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fmount(loc_id, name, child_id, plist_id);
+        }
+    }
+
+    /**
+     * Given a mount point, H5Funmount disassociates the mount point's file from the file mounted
+     * there.
+     * 
+     * @param loc_id The identifier for the location at which the specified file is to be unmounted.
+     * @param name The name of the file to be unmounted.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Funmount(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Funmount(loc_id, name);
+        }
+    }
+
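+    // Usage sketch (illustrative; 'parentId' and 'childId' are assumed to be open
+    // file identifiers and "/mnt" an existing group in the parent file):
+    //
+    //     H5Fmount(parentId, "/mnt", childId, HDF5Constants.H5P_DEFAULT);
+    //     // ... objects of the child file are now reachable below "/mnt" ...
+    //     H5Funmount(parentId, "/mnt");
+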
+    /**
+     * H5Freopen reopens an HDF5 file.
+     * 
+     * @param file_id Identifier of a file to terminate and reopen access to.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @return a new file identifier if successful
+     */
+    public static int H5Freopen(int file_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Freopen(file_id);
+        }
+    }
+
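+    /**
+     * H5Fget_obj_ids returns the identifiers of all objects of the given <var>types</var> that
+     * are currently open in the file, storing at most <var>max</var> of them in
+     * <var>obj_id_list</var>.
+     */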
+    public static int H5Fget_obj_ids(int file_id, int types, int max, int[] obj_id_list)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fget_obj_ids(file_id, types, max, obj_id_list);
+        }
+    }
+
+    public static int H5Fget_obj_count(int file_id, int types) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fget_obj_count(file_id, types);
+        }
+    }
+
+    public static long H5Fget_name(int obj_id, String name, int size) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fget_name(obj_id, name, size);
+        }
+    }
+
+    public static long H5Fget_filesize(int file_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Fget_filesize(file_id);
+        }
+    }
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5GLO.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5GLO.java
new file mode 100644
index 0000000..8fdefc0
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5GLO.java
@@ -0,0 +1,444 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 group, link and object functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5GLO
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5G: Group Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Gcreate creates a new group with the specified name at the specified location, loc_id.
+     * 
+     * @param loc_id The file or group identifier.
+     * @param name The absolute or relative name of the new group.
+     * @param link_create_plist_id Property list for link creation.
+     * @param group_create_plist_id Property list for group creation.
+     * @param group_access_plist_id Property list for group access.
+     * @return a valid group identifier for the open group if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Gcreate(int loc_id, String name, int link_create_plist_id,
+            int group_create_plist_id, int group_access_plist_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gcreate(loc_id, name, link_create_plist_id, group_create_plist_id,
+                    group_access_plist_id);
+        }
+    }
+
+    /**
+     * H5Gopen opens an existing group with the specified name at the specified location, loc_id.
+     * 
+     * @param loc_id File or group identifier within which the group is to be opened.
+     * @param name Name of group to open.
+     * @param access_plist_id Group access property list identifier (H5P_DEFAULT for the default
+     *            property list).
+     * @return a valid group identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Gopen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gopen(loc_id, name, access_plist_id);
+        }
+    }
+
+    /**
+     * H5Gclose releases resources used by a group which was opened by a call to H5Gcreate() or
+     * H5Gopen().
+     * 
+     * @param group_id Group identifier to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Gclose(int group_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gclose(group_id);
+        }
+    }
+
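+    // Usage sketch (illustrative; 'fileId' is an assumed open file identifier):
+    // create a group with default property lists and release it again.
+    //
+    //     final int groupId = H5Gcreate(fileId, "/myGroup", HDF5Constants.H5P_DEFAULT,
+    //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    //     try
+    //     {
+    //         // ... add datasets or sub-groups ...
+    //     } finally
+    //     {
+    //         H5Gclose(groupId);
+    //     }
+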
+    /**
+     * H5Gunlink removes an association between a name and an object.
+     * 
+     * @param loc_id Identifier of the file containing the object.
+     * @param name Name of the object to unlink.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Gunlink(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gunlink(loc_id, name);
+        }
+    }
+
+    /**
+     * H5Gset_comment sets the comment for the object name to comment. Any previously existing
+     * comment is overwritten.
+     * 
+     * @param loc_id IN: Identifier of the file, group, dataset, or datatype.
+     * @param name IN: Name of the object whose comment is to be set or reset.
+     * @param comment IN: The new comment.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name or comment is null.
+     */
+    public static int H5Gset_comment(int loc_id, String name, String comment)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gset_comment(loc_id, name, comment);
+        }
+    }
+
+    /**
+     * H5Gget_comment retrieves the comment for the object name. The comment is returned in the
+     * buffer comment.
+     * 
+     * @param loc_id IN: Identifier of the file, group, dataset, or datatype.
+     * @param name IN: Name of the object whose comment is to be retrieved.
+     * @param bufsize IN: Anticipated size of the buffer required to hold comment.
+     * @param comment OUT: The comment.
+     * @return the number of characters in the comment, counting the null terminator, if successful
+     * @exception ArrayIndexOutOfBoundsException - JNI error writing back data
+     * @exception ArrayStoreException - JNI error writing back data
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     * @exception IllegalArgumentException - bufsize < 1 or comment is invalid.
+     */
+    public static int H5Gget_comment(int loc_id, String name, int bufsize, String[] comment)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gget_comment(loc_id, name, bufsize, comment);
+        }
+    }
+
+    /**
+     * Returns the number of links in the group specified by group_id.
+     * 
+     * @param group_id Group identifier.
+     * @return the number of links in the group if successful.
+     */
+    public static long H5Gget_nlinks(int group_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Gget_nlinks(group_id);
+        }
+    }
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5G: Object Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Oopen opens an existing object with the specified name at the specified location, loc_id.
+     * 
+     * @param loc_id File or group identifier within which the object is to be opened.
+     * @param name Name of object to open.
+     * @param access_plist_id Object access property list identifier (H5P_DEFAULT for the default
+     *            property list).
+     * @return a valid object identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Oopen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Oopen(loc_id, name, access_plist_id);
+        }
+    }
+
+    /**
+     * H5Oclose releases resources used by an object which was opened by a call to H5Oopen().
+     * 
+     * @param loc_id Object identifier to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Oclose(int loc_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Oclose(loc_id);
+        }
+    }
+
+    /**
+     * H5Ocopy copies an existing object with the specified src_name at the specified location,
+     * src_loc_id, to the specified dst_name at the specified destination location, dst_loc_id.
+     * 
+     * @param src_loc_id Source file or group identifier within which the object to be copied
+     *            resides.
+     * @param src_name Name of the source object to be copied.
+     * @param dst_loc_id Destination file or group identifier.
+     * @param dst_name Name of the destination object.
+     * @param object_copy_plist Object copy property list identifier (H5P_DEFAULT for the default
+     *            property list).
+     * @param link_creation_plist Link creation property list identifier for the new hard link
+     *            (H5P_DEFAULT for the default property list).
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - src_name or dst_name is null.
+     */
+    public static int H5Ocopy(int src_loc_id, String src_name, int dst_loc_id, String dst_name,
+            int object_copy_plist, int link_creation_plist) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Ocopy(src_loc_id, src_name, dst_loc_id, dst_name, object_copy_plist,
+                    link_creation_plist);
+        }
+    }
+
+    /**
+     * H5Oget_info_by_name returns information about the object. This method follows soft links and
+     * returns information about the link target, rather than the link.
+     * <p>
+     * If not <code>null</code>, <var>info</var> needs to be an array of length 5 and will return
+     * the following information in each index:
+     * <ul>
+     * <li>0: filenumber that the object is in</li>
+     * <li>1: address of the object in the file</li>
+     * <li>2: reference count of the object (will be {@code > 1} if more than one hard link exists
+     * to the object)</li>
+     * <li>3: creation time of the object (in seconds since start of the epoch)</li>
+     * <li>4: number of attributes that this object has</li>
+     * </ul>
+     * 
+     * @param loc_id File or group identifier within which object is to be open.
+     * @param object_name Name of object to get info for.
+     * @param infoOrNull If not <code>null</code>, it will return additional information about this
+     *            object. Needs to be either <code>null</code> or an array of length 5.
+     * @param exception_when_non_existent If <code>true</code>, an HDF5LibraryException will be
+     *            thrown when the object does not exist, otherwise -1 will be returned.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Oget_info_by_name(int loc_id, String object_name, long[] infoOrNull,
+            boolean exception_when_non_existent) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Oget_info_by_name(loc_id, object_name, infoOrNull,
+                    exception_when_non_existent);
+        }
+    }
+
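+    // Usage sketch (illustrative; 'fileId' and the object path are assumptions):
+    // read the hard-link reference count of an object from index 2 of the info
+    // array documented above.
+    //
+    //     final long[] info = new long[5];
+    //     H5Oget_info_by_name(fileId, "/myGroup/myDataSet", info, true);
+    //     final long refCount = info[2];
+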
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5G: Link Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Lcreate_hard creates a hard link for an already existing object.
+     * 
+     * @param obj_loc_id File, group, dataset, or datatype identifier of the existing object
+     * @param obj_name A name of the existing object
+     * @param link_loc_id Location identifier of the link to create
+     * @param link_name Name of the link to create
+     * @param lcpl_id Link creation property list identifier.
+     * @param lapl_id Link access property list identifier.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - obj_name or link_name is null.
+     */
+    public static int H5Lcreate_hard(int obj_loc_id, String obj_name, int link_loc_id,
+            String link_name, int lcpl_id, int lapl_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5
+                    .H5Lcreate_hard(obj_loc_id, obj_name, link_loc_id, link_name, lcpl_id, lapl_id);
+        }
+    }
+
+    /**
+     * H5Lcreate_soft creates a soft link to some target path.
+     * 
+     * @param target_path The path of the link target
+     * @param link_loc_id Location identifier of the link to create
+     * @param link_name Name of the link to create
+     * @param lcpl_id Link creation property list identifier.
+     * @param lapl_id Link access property list identifier.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - target_path or link_name is null.
+     */
+    public static int H5Lcreate_soft(String target_path, int link_loc_id, String link_name,
+            int lcpl_id, int lapl_id) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lcreate_soft(target_path, link_loc_id, link_name, lcpl_id, lapl_id);
+        }
+    }
+
+    /**
+     * H5Lcreate_external creates an external link to some object in another file.
+     * 
+     * @param file_name File name of the link target
+     * @param obj_name Object name of the link target
+     * @param link_loc_id Location identifier of the link to create
+     * @param link_name Name of the link to create
+     * @param lcpl_id Link creation property list identifier.
+     * @param lapl_id Link access property list identifier.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - file_name, obj_name or link_name is null.
+     */
+    public static int H5Lcreate_external(String file_name, String obj_name, int link_loc_id,
+            String link_name, int lcpl_id, int lapl_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lcreate_external(file_name, obj_name, link_loc_id, link_name, lcpl_id,
+                    lapl_id);
+        }
+    }
+
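+    // Usage sketch (illustrative; 'fileId' and the paths are assumptions): create
+    // a soft link "/alias" that points at an existing dataset.
+    //
+    //     H5Lcreate_soft("/myGroup/myDataSet", fileId, "/alias",
+    //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+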
+    /**
+     * H5Lmove moves a link atomically to a new group or renames it.
+     * 
+     * @param src_loc_id The old location identifier of the object to be renamed
+     * @param src_name The old name of the object to be renamed
+     * @param dst_loc_id The new location identifier of the link
+     * @param dst_name The new name of the object
+     * @param lcpl_id Link creation property list identifier.
+     * @param lapl_id Link access property list identifier.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - src_name or dst_name is null.
+     */
+    public static int H5Lmove(int src_loc_id, String src_name, int dst_loc_id, String dst_name,
+            int lcpl_id, int lapl_id) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lmove(src_loc_id, src_name, dst_loc_id, dst_name, lcpl_id, lapl_id);
+        }
+    }
+
+    /**
+     * H5Lexists returns <code>true</code> if a link with <var>name</var> exists and <code>false
+     * </code> otherwise.
+     * <p>
+     * <i>Note:</i> The Java wrapper differs from the low-level C routine in that it will return
+     * <code>false</code> if <var>name</var> is a path that contains groups which don't exist (the C
+     * routine will give you an <code>H5E_NOTFOUND</code> in this case).
+     */
+    public static boolean H5Lexists(int loc_id, String name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lexists(loc_id, name);
+        }
+    }
+
+    /**
+     * H5Lget_link_info returns the type of the link. If <code>lname != null</code> and
+     * <var>name</var> is a symbolic link, <code>lname[0]</code> will contain the target of the
+     * link. If <var>exception_when_non_existent</var> is <code>true</code>, the method will throw
+     * an exception when the link does not exist, otherwise -1 will be returned.
+     */
+    public static int H5Lget_link_info(int loc_id, String name, String[] lname,
+            boolean exception_when_non_existent) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lget_link_info(loc_id, name, lname, exception_when_non_existent);
+        }
+    }
+
+    /**
+     * H5Lget_link_info_all returns the names, types and link targets of all links in group
+     * <var>name</var>.
+     */
+    public static int H5Lget_link_info_all(final int loc_id, final String name,
+            final String[] oname, final int[] type, final String[] lname)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lget_link_info_all(loc_id, name, oname, type, lname);
+        }
+    }
+
+    public static int H5Lget_link_info_all(int loc_id, String name, String[] oname, int[] type,
+            String[] lname, int n) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lget_link_info_all(loc_id, name, oname, type, lname, n);
+        }
+    }
+
+    /**
+     * H5Lget_link_names_all returns the names of all links in group <var>name</var>.
+     */
+    public static int H5Lget_link_names_all(final int loc_id, final String name,
+            final String[] oname) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lget_link_names_all(loc_id, name, oname);
+        }
+    }
+
+    public static int H5Lget_link_names_all(int loc_id, String name, String[] oname, int n)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Lget_link_names_all(loc_id, name, oname, n);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5General.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5General.java
new file mode 100644
index 0000000..318731a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5General.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 general functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5General
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /** Call to ensure that the native library is loaded. */
+    public static void ensureNativeLibIsLoaded()
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5: General Library Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5open initializes the library.
+     * 
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5open() throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5open();
+        }
+    }
+
+    /**
+     * H5get_libversion retrieves the major, minor, and release numbers of the version of the HDF
+     * library which is linked to the application.
+     * 
+     * @param libversion The version information of the HDF library.
+     * 
+     *            <pre>
+     *            libversion[0] = The major version of the library.
+     *            libversion[1] = The minor version of the library.
+     *            libversion[2] = The release number of the library.
+     *            </pre>
+     * @return a non-negative value if successful, along with the version information.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5get_libversion(int[] libversion) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5get_libversion(libversion);
+        }
+    }
+
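+    // Usage sketch: query the version of the HDF5 library that is linked in.
+    //
+    //     final int[] v = new int[3];
+    //     H5get_libversion(v);
+    //     System.out.println("HDF5 " + v[0] + "." + v[1] + "." + v[2]);
+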
+    public static int H5set_free_list_limits(int reg_global_lim, int reg_list_lim,
+            int arr_global_lim, int arr_list_lim, int blk_global_lim, int blk_list_lim)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5set_free_list_limits(reg_global_lim, reg_list_lim, arr_global_lim,
+                    arr_list_lim, blk_global_lim, blk_list_lim);
+        }
+    }
+
+    public static int H5Zfilter_avail(int filter) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Zfilter_avail(filter);
+        }
+    }
+
+    public static int H5Zunregister(int filter) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Zunregister(filter);
+        }
+    }
+
+    public static int H5Zget_filter_info(int filter) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Zget_filter_info(filter);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5P.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5P.java
new file mode 100644
index 0000000..27cb001
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5P.java
@@ -0,0 +1,1448 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 property list functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5P
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /**
+     * H5Pcreate creates a new property list as an instance of some property list class.
+     * 
+     * @param type IN: The type of property list to create.
+     * @return a property list identifier (plist) if successful; otherwise Fail (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pcreate(int type) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pcreate(type);
+        }
+    }
+
+    /**
+     * H5Pclose terminates access to a property list.
+     * 
+     * @param plist IN: Identifier of the property list to terminate access to.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pclose(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pclose(plist);
+        }
+    }
+
+    /**
+     * H5Pget_class returns the property list class for the property list identified by the plist
+     * parameter.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @return a property list class if successful. Otherwise returns H5P_NO_CLASS (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pget_class(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_class(plist);
+        }
+    }
+
+    /**
+     * H5Pcopy copies an existing property list to create a new property list.
+     * 
+     * @param plist IN: Identifier of property list to duplicate.
+     * @return a property list identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pcopy(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pcopy(plist);
+        }
+    }
+
+    /**
+     * H5Pget_version retrieves the version information of various objects for a file creation
+     * property list.
+     * 
+     * @param plist IN: Identifier of the file creation property list.
+     * @param version_info OUT: version information.
+     * 
+     *            <pre>
+     *            version_info[0] = boot     // boot block version number
+     *            version_info[1] = freelist // global freelist version
+     *            version_info[2] = stab     // symbol table version number
+     *            version_info[3] = shhdr    // shared object header version
+     *            </pre>
+     * @return a non-negative value, with the values of version_info initialized, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - version_info is null.
+     * @exception IllegalArgumentException - version_info is illegal.
+     */
+    public static int H5Pget_version(int plist, int[] version_info) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_version(plist, version_info);
+        }
+    }
+
+    /**
+     * H5Pset_userblock sets the user block size of a file creation property list.
+     * 
+     * @param plist IN: Identifier of property list to modify.
+     * @param size IN: Size of the user-block in bytes.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_userblock(int plist, long size) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_userblock(plist, size);
+        }
+    }
+
+    /**
+     * H5Pget_userblock retrieves the size of a user block in a file creation property list.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param size OUT: Pointer to location to return user-block size.
+     * @return a non-negative value and the size of the user block, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     */
+    public static int H5Pget_userblock(int plist, long[] size) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_userblock(plist, size);
+        }
+    }
+
+    /**
+     * H5Pset_small_data_block_size reserves blocks of size bytes for the contiguous storage of the
+     * raw data portion of small datasets.
+     * 
+     * @param plist IN: Identifier of property list to modify.
+     * @param size IN: Size of the blocks in bytes.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_small_data_block_size(int plist, long size)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_small_data_block_size(plist, size);
+        }
+    }
+
+    /**
+     * H5Pget_small_data_block_size retrieves the size of a block of small data in a file access
+     * property list.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param size OUT: Pointer to location to return block size.
+     * @return a non-negative value and the size of the block, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     */
+    public static int H5Pget_small_data_block_size(int plist, long[] size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_small_data_block_size(plist, size);
+        }
+    }
+
+    /**
+     * H5Pset_sizes sets the byte size of the offsets and lengths used to address objects in an HDF5
+     * file.
+     * 
+     * @param plist IN: Identifier of property list to modify.
+     * @param sizeof_addr IN: Size of an object offset in bytes.
+     * @param sizeof_size IN: Size of an object length in bytes.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_sizes(int plist, int sizeof_addr, int sizeof_size)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_sizes(plist, sizeof_addr, sizeof_size);
+        }
+    }
+
+    /**
+     * H5Pget_sizes retrieves the size of the offsets and lengths used in an HDF5 file. This
+     * function is only valid for file creation property lists.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param size OUT: the size of the offsets and length.
+     * 
+     *            <pre>
+     *            size[0] = sizeof_addr // offset size in bytes
+     *            size[1] = sizeof_size // length size in bytes
+     *            </pre>
+     * @return a non-negative value with the sizes initialized, if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     * @exception IllegalArgumentException - size is invalid.
+     */
+    public static int H5Pget_sizes(int plist, int[] size) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_sizes(plist, size);
+        }
+    }
+
+    /**
+     * H5Pset_sym_k sets the size of parameters used to control the symbol table nodes.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param ik IN: Symbol table tree rank.
+     * @param lk IN: Symbol table node size.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_sym_k(int plist, int ik, int lk) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_sym_k(plist, ik, lk);
+        }
+    }
+
+    /**
+     * H5Pget_sym_k retrieves the size of the symbol table B-tree 1/2 rank and the symbol table leaf
+     * node 1/2 size.
+     * 
+     * @param plist IN: Property list to query.
+     * @param size OUT: the symbol table's B-tree 1/2 rank and leaf node 1/2 size.
+     * 
+     *            <pre>
+     *            size[0] = ik // the symbol table's B-tree 1/2 rank
+     *            size[1] = lk // the symbol table's leaf node 1/2 size
+     *            </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - size is null.
+     * @exception IllegalArgumentException - size is invalid.
+     */
+    public static int H5Pget_sym_k(int plist, int[] size) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_sym_k(plist, size);
+        }
+    }
+
+    /**
+     * H5Pset_istore_k sets the size of the parameter used to control the B-trees for indexing
+     * chunked datasets.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param ik IN: 1/2 rank of chunked storage B-tree.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_istore_k(int plist, int ik) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_istore_k(plist, ik);
+        }
+    }
+
+    /**
+     * H5Pget_istore_k queries the 1/2 rank of an indexed storage B-tree.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param ik OUT: Pointer to location to return the chunked storage B-tree 1/2 rank.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - ik array is null.
+     */
+    public static int H5Pget_istore_k(int plist, int[] ik) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_istore_k(plist, ik);
+        }
+    }
+
+    /**
+     * H5Pset_layout sets the type of storage used to store the raw data for a dataset.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param layout IN: Type of storage layout for raw data.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_layout(int plist, int layout) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_layout(plist, layout);
+        }
+    }
+
+    /**
+     * H5Pget_layout returns the layout of the raw data for a dataset.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @return the layout type of a dataset creation property list if successful. Otherwise returns
+     *         H5D_LAYOUT_ERROR (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pget_layout(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_layout(plist);
+        }
+    }
+
+    /**
+     * H5Pset_chunk sets the size of the chunks used to store a chunked layout dataset.
+     * 
+     * @param plist IN: Identifier for property list to query.
+     * @param ndims IN: The number of dimensions of each chunk.
+     * @param dim IN: An array containing the size of each chunk.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims array is null.
+     * @exception IllegalArgumentException - dims <=0
+     */
+    public static int H5Pset_chunk(int plist, int ndims, byte[] dim) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_chunk(plist, ndims, dim);
+        }
+    }
+
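+    /**
+     * Convenience variant of {@link #H5Pset_chunk(int, int, byte[])} that accepts the chunk
+     * dimensions as a <code>long[]</code> and converts them to a byte array before delegating.
+     * Returns -1 if <var>dim</var> is <code>null</code>.
+     */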
+    public static int H5Pset_chunk(final int plist, final int ndims, final long[] dim)
+            throws HDF5Exception, NullPointerException, IllegalArgumentException
+    {
+        if (dim == null)
+        {
+            return -1;
+        }
+
+        final byte[] thedims = HDFNativeData.longToByte(dim);
+
+        return H5Pset_chunk(plist, ndims, thedims);
+    }
+
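+    // Usage sketch (illustrative; constants from ncsa.hdf.hdf5lib.HDF5Constants):
+    // create a dataset creation property list with a chunked layout of 64x64
+    // element chunks.
+    //
+    //     final int dcplId = H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+    //     H5Pset_layout(dcplId, HDF5Constants.H5D_CHUNKED);
+    //     H5Pset_chunk(dcplId, 2, new long[] { 64L, 64L });
+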
+    /**
+     * H5Pget_chunk retrieves the size of chunks for the raw data of a chunked layout dataset.
+     * 
+     * @param plist IN: Identifier of property list to query.
+     * @param max_ndims IN: Size of the dims array.
+     * @param dims OUT: Array to store the chunk dimensions.
+     * @return chunk dimensionality if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims array is null.
+     * @exception IllegalArgumentException - max_ndims <=0
+     */
+    public static int H5Pget_chunk(int plist, int max_ndims, long[] dims)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_chunk(plist, max_ndims, dims);
+        }
+    }
+
+    /**
+     * H5Pset_alignment sets the alignment properties of a file access property list so that any
+     * file object >= THRESHOLD bytes will be aligned on an address which is a multiple of
+     * ALIGNMENT.
+     * 
+     * @param plist IN: Identifier for a file access property list.
+     * @param threshold IN: Threshold value.
+     * @param alignment IN: Alignment value.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_alignment(int plist, long threshold, long alignment)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_alignment(plist, threshold, alignment);
+        }
+    }
+
+    /**
+     * H5Pget_alignment retrieves the current settings for alignment properties from a file access
+     * property list.
+     * 
+     * @param plist IN: Identifier of a file access property list.
+     * @param alignment OUT: threshold value and alignment value.
+     * 
+     *            <pre>
+     *            alignment[0] = threshold // threshold value
+     *            alignment[1] = alignment // alignment value
+     *            </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - alignment array is null.
+     * @exception IllegalArgumentException - alignment array is invalid.
+     */
+    public static int H5Pget_alignment(int plist, long[] alignment) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_alignment(plist, alignment);
+        }
+    }
+
+    /**
+     * H5Pset_external adds an external file to the list of external files.
+     * 
+     * @param plist IN: Identifier of a dataset creation property list.
+     * @param name IN: Name of an external file.
+     * @param offset IN: Offset, in bytes, from the beginning of the file to the location in the
+     *            file where the data starts.
+     * @param size IN: Number of bytes reserved in the file for the data.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Pset_external(int plist, String name, long offset, long size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_external(plist, name, offset, size);
+        }
+    }
+
+    /**
+     * H5Pget_external_count returns the number of external files for the specified dataset.
+     * 
+     * @param plist IN: Identifier of a dataset creation property list.
+     * @return the number of external files if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pget_external_count(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_external_count(plist);
+        }
+    }
+
+    /**
+     * H5Pget_external returns information about an external file.
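+     * <p>
+     * A sketch of iterating over all external files of a dataset creation property list
+     * (<code>dcpl</code> is assumed to be a valid identifier; the name buffer size of 256 is
+     * arbitrary):
+     * 
+     * <pre>
+     * int n = H5Pget_external_count(dcpl);
+     * for (int i = 0; i &lt; n; ++i)
+     * {
+     *     String[] name = new String[1];
+     *     long[] offsetAndSize = new long[2];
+     *     H5Pget_external(dcpl, i, 256, name, offsetAndSize);
+     * }
+     * </pre>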
+     * 
+     * @param plist IN: Identifier of a dataset creation property list.
+     * @param idx IN: External file index.
+     * @param name_size IN: Maximum length of name array.
+     * @param name OUT: Name of the external file.
+     * @param size OUT: the offset value and the size of the external file data.
+     * 
+     *            <pre>
+     *            size[0] = offset // a location to return an offset value
+     *            size[1] = size   // a location to return the size of the external file data
+     *            </pre>
+     * 
+     * @return a non-negative value if successful
+     * @exception ArrayIndexOutOfBoundsException Fatal error on Copyback
+     * @exception ArrayStoreException Fatal error on Copyback
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name or size is null.
+     * @exception IllegalArgumentException - name_size <= 0.
+     */
+    public static int H5Pget_external(int plist, int idx, int name_size, String[] name, long[] size)
+            throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_external(plist, idx, name_size, name, size);
+        }
+    }
+
+    /**
+     * H5Pset_fill_value sets the fill value for a dataset creation property list.
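+     * <p>
+     * A minimal sketch, assuming <code>dcpl</code> is a dataset creation property list for a
+     * dataset of native ints:
+     * 
+     * <pre>
+     * // fill value 42, encoded in native byte order to match H5T_NATIVE_INT
+     * byte[] fill = java.nio.ByteBuffer.allocate(4)
+     *         .order(java.nio.ByteOrder.nativeOrder()).putInt(42).array();
+     * H5Pset_fill_value(dcpl, HDF5Constants.H5T_NATIVE_INT, fill);
+     * </pre>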
+     * 
+     * @param plist_id IN: Property list identifier.
+     * @param type_id IN: The datatype identifier of value.
+     * @param value IN: The fill value.
+     * @return a non-negative value if successful
+     * @exception HDF5Exception - Error converting data array
+     */
+    public static int H5Pset_fill_value(int plist_id, int type_id, byte[] value)
+            throws HDF5Exception
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fill_value(plist_id, type_id, value);
+        }
+    }
+
+    /**
+     * H5Pget_fill_value queries the fill value property of a dataset creation property list. <b>NOT
+     * IMPLEMENTED YET</b>
+     * 
+     * @param plist_id IN: Property list identifier.
+     * @param type_id IN: The datatype identifier of value.
+     * @param value OUT: The fill value.
+     * @return a non-negative value if successful
+     */
+    public static int H5Pget_fill_value(int plist_id, int type_id, byte[] value)
+            throws HDF5Exception
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_fill_value(plist_id, type_id, value);
+        }
+    }
+
+    /**
+     * H5Pset_filter adds the specified filter and corresponding properties to the end of an output
+     * filter pipeline.
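+     * <p>
+     * A sketch (identifiers hypothetical) that adds the deflate filter explicitly instead of
+     * calling <code>H5Pset_deflate</code>:
+     * 
+     * <pre>
+     * // deflate with compression level 6 as the single auxiliary value
+     * H5Pset_filter(dcpl, HDF5Constants.H5Z_FILTER_DEFLATE,
+     *         HDF5Constants.H5Z_FLAG_OPTIONAL, 1, new int[] { 6 });
+     * </pre>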
+     * 
+     * @param plist IN: Property list identifier.
+     * @param filter IN: Filter to be added to the pipeline.
+     * @param flags IN: Bit vector specifying certain general properties of the filter.
+     * @param cd_nelmts IN: Number of elements in cd_values
+     * @param cd_values IN: Auxiliary data for the filter.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_filter(int plist, int filter, int flags, int cd_nelmts, int[] cd_values)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_filter(plist, filter, flags, cd_nelmts, cd_values);
+        }
+    }
+
+    /**
+     * H5Pget_nfilters returns the number of filters defined in the filter pipeline associated with
+     * the property list plist.
+     * 
+     * @param plist IN: Property list identifier.
+     * @return the number of filters in the pipeline if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pget_nfilters(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_nfilters(plist);
+        }
+    }
+
+    /**
+     * H5Pget_filter returns information about a filter, specified by its filter number, in a filter
+     * pipeline, specified by the property list with which it is associated.
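+     * <p>
+     * A sketch of querying the first filter in the pipeline (<code>dcpl</code> is assumed valid;
+     * the array and name buffer sizes are arbitrary):
+     * 
+     * <pre>
+     * int[] flags = new int[1];
+     * int[] cdNelmts = new int[] { 8 };
+     * int[] cdValues = new int[8];
+     * String[] name = new String[1];
+     * int filterId = H5Pget_filter(dcpl, 0, flags, cdNelmts, cdValues, 64, name);
+     * </pre>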
+     * 
+     * @param plist IN: Property list identifier.
+     * @param filter_number IN: Sequence number within the filter pipeline of the filter for which
+     *            information is sought.
+     * @param flags OUT: Bit vector specifying certain general properties of the filter.
+     * @param cd_nelmts IN/OUT: Number of elements in cd_values
+     * @param cd_values OUT: Auxiliary data for the filter.
+     * @param namelen IN: Anticipated number of characters in name.
+     * @param name OUT: Name of the filter.
+     * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR
+     *         (-1).
+     * @exception ArrayIndexOutOfBoundsException Fatal error on Copyback
+     * @exception ArrayStoreException Fatal error on Copyback
+     * @exception NullPointerException - name or an array is null.
+     */
+    public static int H5Pget_filter(int plist, int filter_number, int[] flags, int[] cd_nelmts,
+            int[] cd_values, int namelen, String[] name) throws ArrayIndexOutOfBoundsException,
+            ArrayStoreException, HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_filter(plist, filter_number, flags, cd_nelmts, cd_values, namelen,
+                    name);
+        }
+    }
+
+    /**
+     * H5Pset_cache sets the number of elements (objects) in the meta data cache and the total
+     * number of bytes in the raw data chunk cache.
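+     * <p>
+     * A minimal sketch with illustrative values (<code>fapl</code> hypothetical):
+     * 
+     * <pre>
+     * // 1021 metadata objects, 521 chunk slots, a 1 MiB chunk cache, default preemption
+     * H5Pset_cache(fapl, 1021, 521, 1024 * 1024, 0.75);
+     * </pre>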
+     * 
+     * @param plist IN: Identifier of the file access property list.
+     * @param mdc_nelmts IN: Number of elements (objects) in the meta data cache.
+     * @param rdcc_nelmts IN: Number of elements (chunks) in the raw data chunk cache.
+     * @param rdcc_nbytes IN: Total size of the raw data chunk cache, in bytes.
+     * @param rdcc_w0 IN: Preemption policy.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_cache(int plist, int mdc_nelmts, int rdcc_nelmts, int rdcc_nbytes,
+            double rdcc_w0) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_cache(plist, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+        }
+    }
+
+    /**
+     * Retrieves the maximum possible number of elements in the meta data cache, the maximum
+     * possible number of elements and bytes in the raw data chunk cache, and the preemption
+     * policy value (RDCC_W0).
+     * 
+     * @param plist IN: Identifier of the file access property list.
+     * @param mdc_nelmts IN/OUT: Number of elements (objects) in the meta data cache.
+     * @param rdcc_nelmts IN/OUT: Number of elements (chunks) in the raw data chunk cache.
+     * @param rdcc_nbytes IN/OUT: Total size of the raw data chunk cache, in bytes.
+     * @param rdcc_w0 IN/OUT: Preemption policy.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an array is null.
+     */
+    public static int H5Pget_cache(int plist, int[] mdc_nelmts, int[] rdcc_nelmts,
+            int[] rdcc_nbytes, double[] rdcc_w0) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_cache(plist, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+        }
+    }
+
+    /**
+     * H5Pset_buffer sets type conversion and background buffers. Given a
+     * dataset transfer property list, H5Pset_buffer sets the maximum size for the type conversion
+     * buffer and background buffer and optionally supplies pointers to application-allocated
+     * buffers. If the buffer size is smaller than the entire amount of data being transferred
+     * between the application and the file, and a type conversion buffer or background buffer is
+     * required, then strip mining will be used. Note that there are minimum size requirements for
+     * the buffer. Strip mining can only break the data up along the first dimension, so the buffer
+     * must be large enough to accommodate a complete slice that encompasses all of the remaining
+     * dimensions. For example, when strip mining a 100x200x300 hyperslab of a simple data space,
+     * the buffer must be large enough to hold 1x200x300 data elements. When strip mining a
+     * 100x200x300x150 hyperslab of a simple data space, the buffer must be large enough to hold
+     * 1x200x300x150 data elements. If tconv and/or bkg are null pointers, then buffers will be
+     * allocated and freed during the data transfer.
+     * 
+     * @param plist Identifier for the dataset transfer property list.
+     * @param size Size, in bytes, of the type conversion and background buffers.
+     * @param tconv byte array of application-allocated type conversion buffer.
+     * @param bkg byte array of application-allocated background buffer.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception IllegalArgumentException - plist is invalid.
+     */
+    public static int H5Pset_buffer(int plist, int size, byte[] tconv, byte[] bkg)
+            throws HDF5LibraryException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_buffer(plist, size, tconv, bkg);
+        }
+    }
+
+    /**
+     * H5Pget_buffer gets type conversion and background buffers. Returns buffer size, in bytes, if
+     * successful; otherwise 0 on failure.
+     * 
+     * @param plist Identifier for the dataset transfer property list.
+     * @param tconv byte array of application-allocated type conversion buffer.
+     * @param bkg byte array of application-allocated background buffer.
+     * @return buffer size, in bytes, if successful; otherwise 0 on failure
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception IllegalArgumentException - plist is invalid.
+     */
+    public static int H5Pget_buffer(int plist, byte[] tconv, byte[] bkg)
+            throws HDF5LibraryException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_buffer(plist, tconv, bkg);
+        }
+    }
+
+    /**
+     * H5Pset_preserve sets the dataset transfer property list status to TRUE or FALSE.
+     * 
+     * @param plist IN: Identifier for the dataset transfer property list.
+     * @param status IN: Status of the dataset transfer property list.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception IllegalArgumentException - plist is invalid.
+     */
+    public static int H5Pset_preserve(int plist, boolean status) throws HDF5LibraryException,
+            IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_preserve(plist, status);
+        }
+    }
+
+    /**
+     * H5Pget_preserve checks the status of the dataset transfer property list.
+     * 
+     * @param plist IN: Identifier for the dataset transfer property list.
+     * @return TRUE or FALSE if successful; otherwise returns a negative value
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pget_preserve(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_preserve(plist);
+        }
+    }
+
+    /**
+     * H5Pset_deflate sets the compression method for a dataset.
+     * 
+     * @param plist IN: Identifier for the dataset creation property list.
+     * @param level IN: Compression level.
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_deflate(int plist, int level) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_deflate(plist, level);
+        }
+    }
+
+    /**
+     * H5Pset_nbit sets the compression method for a dataset to n-bits.
+     * <p>
+     * Keeps only n bits of an integer or float value.
+     * 
+     * @param plist IN: Identifier for the dataset creation property list.
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_nbit(int plist) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_nbit(plist);
+        }
+    }
+
+    /**
+     * H5Pset_scaleoffset sets the compression method for a dataset to scale_offset.
+     * <p>
+     * Generally speaking, Scale-Offset compression performs a scale and/or offset operation on each
+     * data value and truncates the resulting value to a minimum number of bits (MinBits) before
+     * storing it. The current Scale-Offset filter supports integer and floating-point datatypes.
+     * 
+     * @param plist IN: Identifier for the dataset creation property list.
+     * @param scale_type IN: One of {@link HDF5Constants#H5Z_SO_INT},
+     *            {@link HDF5Constants#H5Z_SO_FLOAT_DSCALE} or
+     *            {@link HDF5Constants#H5Z_SO_FLOAT_ESCALE}. Note that
+     *            {@link HDF5Constants#H5Z_SO_FLOAT_ESCALE} is not implemented as of HDF5 1.8.2.
+     * @param scale_factor IN: The scale factor. For integer datatypes this is the number of bits
+     *            to retain (0 lets the library determine the minimum); for float datatypes it is
+     *            the decimal scale factor.
+     * @return non-negative if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Pset_scaleoffset(int plist, int scale_type, int scale_factor)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_scaleoffset(plist, scale_type, scale_factor);
+        }
+    }
+
+    /**
+     * H5Pset_create_intermediate_group specifies in the property list whether to create missing
+     * intermediate groups.
+     * <p>
+     * H5Pset_create_intermediate_group specifies whether to set the link creation property list
+     * lcpl_id so that calls to functions that create objects in groups different from the current
+     * working group will create intermediate groups that may be missing in the path of a new or
+     * moved object.
+     * <p>
+     * Functions that create objects in or move objects to a group other than the current working
+     * group make use of this property. H5Gcreate_anon and H5Lmove are examples of such functions.
+     * <p>
+     * If crt_intermed_group is <code>true</code>, the H5G_CRT_INTMD_GROUP will be added to lcpl_id
+     * (if it is not already there). Missing intermediate groups will be created upon calls to
+     * functions such as those listed above that use lcpl_id.
+     * <p>
+     * If crt_intermed_group is <code>false</code>, the H5G_CRT_INTMD_GROUP, if present, will be
+     * removed from lcpl_id. Missing intermediate groups will not be created upon calls to functions
+     * such as those listed above that use lcpl_id.
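+     * <p>
+     * A minimal sketch (<code>lcpl</code> hypothetical):
+     * 
+     * <pre>
+     * H5Pset_create_intermediate_group(lcpl, true);
+     * // creating "/a/b/c" with lcpl now also creates "/a" and "/a/b" if they are missing
+     * </pre>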
+     * 
+     * @param lcpl_id Link creation property list identifier
+     * @param crt_intermed_group Flag specifying whether to create intermediate groups upon the
+     *            creation of an object
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static void H5Pset_create_intermediate_group(int lcpl_id, boolean crt_intermed_group)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            H5.H5Pset_create_intermediate_group(lcpl_id, crt_intermed_group);
+        }
+    }
+
+    /**
+     * Determines whether property is set to enable creating missing intermediate groups.
+     * 
+     * @return <code>true</code> if intermediate groups are created, <code>false</code> otherwise.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static boolean H5Pget_create_intermediate_group(int lcpl_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_create_intermediate_group(lcpl_id);
+        }
+    }
+
+    /**
+     * Returns a dataset transfer property list (<code>H5P_DATASET_XFER</code>) that has a
+     * conversion exception handler set which aborts conversions that trigger overflows.
+     */
+    public static int H5Pcreate_xfer_abort_overflow()
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pcreate_xfer_abort_overflow();
+        }
+    }
+
+    /**
+     * Returns a dataset transfer property list (<code>H5P_DATASET_XFER</code>) that has a
+     * conversion exception handler set which aborts all conversions.
+     */
+    public static int H5Pcreate_xfer_abort()
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pcreate_xfer_abort();
+        }
+    }
+
+    public static int H5Pset_alloc_time(int plist_id, int alloc_time) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_alloc_time(plist_id, alloc_time);
+        }
+    }
+
+    public static int H5Pget_alloc_time(int plist_id, int[] alloc_time)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_alloc_time(plist_id, alloc_time);
+        }
+    }
+
+    public static int H5Pset_fill_time(int plist_id, int fill_time) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fill_time(plist_id, fill_time);
+        }
+    }
+
+    public static int H5Pget_fill_time(int plist_id, int[] fill_time) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_fill_time(plist_id, fill_time);
+        }
+    }
+
+    public static int H5Pfill_value_defined(int plist_id, int[] status)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pfill_value_defined(plist_id, status);
+        }
+    }
+
+    public static int H5Pset_fletcher32(int plist) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fletcher32(plist);
+        }
+    }
+
+    public static int H5Pset_edc_check(int plist, int check) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_edc_check(plist, check);
+        }
+    }
+
+    public static int H5Pget_edc_check(int plist) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_edc_check(plist);
+        }
+    }
+
+    public static int H5Pset_shuffle(int plist_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_shuffle(plist_id);
+        }
+    }
+
+    public static int H5Pmodify_filter(int plist, int filter, int flags, long cd_nelmts,
+            int[] cd_values) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pmodify_filter(plist, filter, flags, cd_nelmts, cd_values);
+        }
+    }
+
+    public static int H5Pget_filter_by_id(int plist_id, int filter, int[] flags, long[] cd_nelmts,
+            int[] cd_values, long namelen, String[] name) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_filter_by_id(plist_id, filter, flags, cd_nelmts, cd_values, namelen,
+                    name);
+        }
+    }
+
+    public static boolean H5Pall_filters_avail(int dcpl_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pall_filters_avail(dcpl_id);
+        }
+    }
+
+    public static int H5Pset_hyper_vector_size(int dxpl_id, long vector_size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_hyper_vector_size(dxpl_id, vector_size);
+        }
+    }
+
+    public static int H5Pget_hyper_vector_size(int dxpl_id, long[] vector_size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_hyper_vector_size(dxpl_id, vector_size);
+        }
+    }
+
+    public static int H5Pset_fclose_degree(int plist, int degree) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fclose_degree(plist, degree);
+        }
+    }
+
+    public static int H5Pget_fclose_degree(int plist_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_fclose_degree(plist_id);
+        }
+    }
+
+    public static int H5Pset_fapl_family(int fapl_id, long memb_size, int memb_fapl_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fapl_family(fapl_id, memb_size, memb_fapl_id);
+        }
+    }
+
+    public static int H5Pget_fapl_family(int fapl_id, long[] memb_size, int[] memb_fapl_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_fapl_family(fapl_id, memb_size, memb_fapl_id);
+        }
+    }
+
+    public static int H5Pset_fapl_core(int fapl_id, int increment, boolean backing_store)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fapl_core(fapl_id, increment, backing_store);
+        }
+    }
+
+    public static int H5Pget_fapl_core(int fapl_id, int[] increment, boolean[] backing_store)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_fapl_core(fapl_id, increment, backing_store);
+        }
+    }
+
+    public static int H5Pset_family_offset(int fapl_id, long offset) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_family_offset(fapl_id, offset);
+        }
+    }
+
+    public static long H5Pget_family_offset(int fapl_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_family_offset(fapl_id);
+        }
+    }
+
+    public static int H5Pset_fapl_log(int fapl_id, String logfile, int flags, int buf_size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_fapl_log(fapl_id, logfile, flags, buf_size);
+        }
+    }
+
+    public static int H5Premove_filter(int obj_id, int filter) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Premove_filter(obj_id, filter);
+        }
+    }
+
+    /**
+     * Creates a new property list class of a given class
+     * 
+     * @param cls IN: Class of property list to create
+     * @return a valid property list identifier if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pcreate_list(int cls) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pcreate_list(cls);
+        }
+    }
+
+    /**
+     * Sets a property list value (support integer only)
+     * 
+     * @param plid IN: Property list identifier to modify
+     * @param name IN: Name of property to modify
+     * @param value IN: value to set the property to
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pset(int plid, String name, int value) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset(plid, name, value);
+        }
+    }
+
+    /**
+     * H5Pexist determines whether a property exists within a property list or class
+     * 
+     * @param plid IN: Identifier for the property to query
+     * @param name IN: Name of property to check for
+     * @return a positive value if the property exists in the property object; zero if the property
+     *         does not exist; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pexist(int plid, String name) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pexist(plid, name);
+        }
+    }
+
+    /**
+     * H5Pget_size retrieves the size of a property's value in bytes
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @param name IN: Name of property to query
+     * @return size of a property's value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static long H5Pget_size(int plid, String name) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_size(plid, name);
+        }
+    }
+
+    /**
+     * H5Pget_nprops retrieves the number of properties in a property list or class
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @return number of properties if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static long H5Pget_nprops(int plid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_nprops(plid);
+        }
+    }
+
+    /**
+     * H5Pget_class_name retrieves the name of a generic property list class
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @return name of a property list if successful; null if failed
+     * @throws HDF5LibraryException
+     */
+    public static String H5Pget_class_name(int plid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_class_name(plid);
+        }
+    }
+
+    /**
+     * H5Pget_class_parent retrieves an identifier for the parent class of a property class
+     * 
+     * @param plid IN: Identifier of the property class to query
+     * @return a valid parent class object identifier if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pget_class_parent(int plid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_class_parent(plid);
+        }
+    }
+
+    /**
+     * H5Pisa_class checks to determine whether a property list is a member of the specified class
+     * 
+     * @param plist IN: Identifier of the property list
+     * @param pclass IN: Identifier of the property class
+     * @return a positive value if equal; zero if unequal; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pisa_class(int plist, int pclass) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pisa_class(plist, pclass);
+        }
+    }
+
+    /**
+     * H5Pget retrieves a copy of the value for a property in a property list (support integer only)
+     * 
+     * @param plid IN: Identifier of property object to query
+     * @param name IN: Name of property to query
+     * @return value for a property if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pget(int plid, String name) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget(plid, name);
+        }
+    }
+
+    /**
+     * H5Pequal determines if two property lists or classes are equal
+     * 
+     * @param plid1 IN: First property object to be compared
+     * @param plid2 IN: Second property object to be compared
+     * @return a positive value if equal; zero if unequal; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pequal(int plid1, int plid2) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pequal(plid1, plid2);
+        }
+    }
+
+    /**
+     * H5Pcopy_prop copies a property from one property list or class to another
+     * 
+     * @param dst_id IN: Identifier of the destination property list or class
+     * @param src_id IN: Identifier of the source property list or class
+     * @param name IN: Name of the property to copy
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pcopy_prop(int dst_id, int src_id, String name) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pcopy_prop(dst_id, src_id, name);
+        }
+    }
+
+    /**
+     * H5Premove removes a property from a property list
+     * 
+     * @param plid IN: Identifier of the property list to modify
+     * @param name IN: Name of property to remove
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Premove(int plid, String name) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Premove(plid, name);
+        }
+    }
+
+    /**
+     * H5Punregister removes a property from a property list class
+     * 
+     * @param plid IN: Property list class from which to remove permanent property
+     * @param name IN: Name of property to remove
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Punregister(int plid, String name) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Punregister(plid, name);
+        }
+    }
+
+    /**
+     * Closes an existing property list class
+     * 
+     * @param plid IN: Property list class to close
+     * @return a non-negative value if successful; a negative value if failed
+     * @throws HDF5LibraryException
+     */
+    public static int H5Pclose_class(int plid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pclose_class(plid);
+        }
+    }
+
+    /**
+     * Sets the permissible bounds of the library's file format versions.
+     * <p>
+     * Can be set on the file access property list.
+     * <p>
+     * As of 1.8.0, only the combinations <code>low=H5F_LIBVER_EARLIEST</code> / <code>
+     * high=H5F_LIBVER_LATEST</code> (the default, which means that 1.6 compatible files are
+     * created if no features are used that require a 1.8 format) and <code>low=H5F_LIBVER_LATEST
+     * </code> / <code>high=H5F_LIBVER_LATEST</code> (which means that 1.8 files are always
+     * created; these cannot be read by an earlier library) are allowed.
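+     * <p>
+     * A minimal sketch forcing the 1.8 file format (<code>fapl</code> hypothetical):
+     * 
+     * <pre>
+     * H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
+     *         HDF5Constants.H5F_LIBVER_LATEST);
+     * </pre>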
+     * 
+     * @param plist_id Property list identifier.
+     * @param low The lower permissible bound. One of <code>H5F_LIBVER_EARLIEST</code> or <code>
+     *            H5F_LIBVER_LATEST</code>.
+     * @param high The higher permissible bound. Must be <code>H5F_LIBVER_LATEST</code>.
+     * @return a non-negative value if successful
+     */
+    public static int H5Pset_libver_bounds(int plist_id, int low, int high)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_libver_bounds(plist_id, low, high);
+        }
+    }
+
+    /**
+     * Returns the permissible bounds of the library's file format versions.
+     * 
+     * @param plist_id Property list identifier.
+     * @return an array containing <code>[low, high]</code> on success
+     */
+    public static int[] H5Pget_libver_bounds(int plist_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_libver_bounds(plist_id);
+        }
+    }
+
+    /**
+     * Sets the local heap size hint for an old-style group. This is the chunk size allocated on the
+     * heap for a group.
+     * 
+     * @param gcpl_id The group creation property list to change the heap size hint for
+     * @param size_hint The size hint to set.
+     * @return a non-negative value if successful
+     */
+    public static int H5Pset_local_heap_size_hint(int gcpl_id, int size_hint)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_local_heap_size_hint(gcpl_id, size_hint);
+        }
+    }
+
+    /**
+     * Returns the local heap size hint for an old-style group. This is the chunk size allocated on
+     * the heap for a group.
+     * 
+     * @param gcpl_id The group creation property list to get the heap size hint for
+     * @return The size hint of the group if successful
+     */
+    public static int H5Pget_local_heap_size_hint(int gcpl_id)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_local_heap_size_hint(gcpl_id);
+        }
+    }
+
+    /**
+     * Sets the phase change parameters for a new-style group.
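+     * <p>
+     * A minimal sketch with illustrative values (<code>gcpl</code> hypothetical):
+     * 
+     * <pre>
+     * // store up to 8 links compactly; convert back from dense storage below 6 links
+     * H5Pset_link_phase_change(gcpl, 8, 6);
+     * </pre>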
+     * 
+     * @param gcpl_id The group creation property list to set the link phase changes for
+     * @param max_compact The maximum number of links in a group to store as header messages
+     * @param min_dense The minimum number of links in a group to store in the dense format
+     * @return a non-negative value if successful
+     */
+    public static int H5Pset_link_phase_change(int gcpl_id, int max_compact, int min_dense)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_link_phase_change(gcpl_id, max_compact, min_dense);
+        }
+    }
+
+    /**
+     * Returns the phase change parameters for a new-style group.
+     * 
+     * @param gcpl_id The group creation property list to get the link phase changes for
+     * @return the phase change parameters as array [max_compact, min_dense] if successful
+     */
+    public static int[] H5Pget_link_phase_change(int gcpl_id)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_link_phase_change(gcpl_id);
+        }
+    }
+
+    /**
+     * Sets the character encoding for the given creation property list to the given encoding.
+     * 
+     * @param cpl_id The creation property list to set the character encoding for.
+     * @param encoding The encoding (one of {@link HDF5Constants#H5T_CSET_ASCII} or
+     *            {@link HDF5Constants#H5T_CSET_UTF8}) to use.
+     * @return a non-negative value if successful
+     */
+    public static int H5Pset_char_encoding(int cpl_id, int encoding)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pset_char_encoding(cpl_id, encoding);
+        }
+    }
+
+    /**
+     * Returns the character encoding currently set for a creation property list.
+     * 
+     * @param cpl_id The creation property list to get the character encoding for.
+     * @return The encoding, one of {@link HDF5Constants#H5T_CSET_ASCII} or
+     *         {@link HDF5Constants#H5T_CSET_UTF8}.
+     */
+    public static int H5Pget_char_encoding(int cpl_id)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Pget_char_encoding(cpl_id);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5RI.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5RI.java
new file mode 100644
index 0000000..8a81ce4
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5RI.java
@@ -0,0 +1,286 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 reference and identifier functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5RI
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5R: Reference Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Rcreate creates the reference, ref, of the type specified in ref_type, pointing to the
+     * object name located at loc_id.
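+     * <p>
+     * A minimal sketch creating an object reference (<code>fileId</code> and the path are
+     * hypothetical; -1 is passed because no dataspace selection is needed for an object
+     * reference):
+     * 
+     * <pre>
+     * byte[] ref = H5Rcreate(fileId, "/group/dataset", HDF5Constants.H5R_OBJECT, -1);
+     * </pre>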
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param name IN: Name of object at location loc_id.
+     * @param ref_type IN: Type of reference.
+     * @param space_id IN: Dataspace identifier with selection.
+     * @return the reference (byte[]) if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */
+    public static byte[] H5Rcreate(final int loc_id, final String name, final int ref_type,
+            final int space_id) throws HDF5LibraryException, NullPointerException,
+            IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rcreate(loc_id, name, ref_type, space_id);
+        }
+    }
+
+    /**
+     * H5Rcreate creates object references pointing to the object names located at loc_id.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param name IN: Names of objects at location loc_id.
+     * @return the reference (long[]) if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */
+    public static long[] H5Rcreate(final int loc_id, final String[] name)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rcreate(loc_id, name);
+        }
+    }
+
+    /**
+     * Given a reference to some object, H5Rdereference opens that object and returns an identifier.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param ref IN: reference to an object
+     * @return valid identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - output array is null.
+     * @exception IllegalArgumentException - output array is invalid.
+     */
+    public static int H5Rdereference(int loc_id, long ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rdereference(loc_id, ref);
+        }
+    }
+
+    /**
+     * Given a reference to some object, H5Rdereference opens that object and returns an identifier.
+     * 
+     * @param loc_id IN: Location identifier used to locate the object being pointed to.
+     * @param ref_type IN: The reference type of ref.
+     * @param ref IN: reference to an object
+     * @return valid identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - output array is null.
+     * @exception IllegalArgumentException - output array is invalid.
+     */
+    public static int H5Rdereference(int loc_id, int ref_type, byte[] ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rdereference(loc_id, ref_type, ref);
+        }
+    }
+
+    /**
+     * Given a reference to an object ref, H5Rget_region creates a copy of the dataspace of the
+     * dataset pointed to and defines a selection in the copy which is the region pointed to.
+     * 
+     * @param loc_id IN: loc_id of the reference object.
+     * @param ref_type IN: The reference type of ref.
+     * @param ref IN: the reference to the object and region
+     * @return a valid identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - output array is null.
+     * @exception IllegalArgumentException - output array is invalid.
+     */
+    public static int H5Rget_region(int loc_id, int ref_type, byte[] ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rget_region(loc_id, ref_type, ref);
+        }
+    }
+
+    /**
+     * Given a reference to an object, H5Rget_obj_type returns the type of the object pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref_type Type of reference to query.
+     * @param ref The reference.
+     * @return the object type if successful; otherwise a negative value is returned to signal
+     *         failure.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static int H5Rget_obj_type(int loc_id, int ref_type, byte[] ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rget_obj_type(loc_id, ref_type, ref);
+        }
+    }
+
+    /**
+     * Given a reference to an object, H5Rget_name returns the name (path) of the object pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref_type Type of reference to query.
+     * @param ref The reference.
+     * @return The path of the object being pointed to, or an empty string, if the object being
+     *         pointed to has no name.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static String H5Rget_name(int loc_id, int ref_type, byte[] ref)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rget_name(loc_id, ref_type, ref);
+        }
+    }
+
+    /**
+     * Given a reference to an object, H5Rget_name returns the name (path) of the object pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref The reference.
+     * @return The path of the object being pointed to, or an empty string, if the object being
+     *         pointed to has no name.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static String H5Rget_name(int loc_id, long ref)
+            throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rget_name(loc_id, ref);
+        }
+    }
+
+    /**
+     * Given an array of object references (ref), H5Rget_name returns the names (paths) of the
+     * objects pointed to.
+     * 
+     * @param loc_id Identifier of the reference object.
+     * @param ref The references.
+     * @return The paths of the objects being pointed to; an entry is an empty string if the
+     *         corresponding object has no name.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - array is null.
+     * @exception IllegalArgumentException - array is invalid.
+     */
+    public static String[] H5Rget_name(int loc_id, long[] ref)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Rget_name(loc_id, ref);
+        }
+    }
+
+    // ////////////////////////////////////////////////////////////
+    // //
+    // H5I: Identifier Interface Functions //
+    // //
+    // ////////////////////////////////////////////////////////////
+
+    /**
+     * H5Iget_type retrieves the type of the object identified by obj_id.
+     * 
+     * @param obj_id IN: Object identifier whose type is to be determined.
+     * @return the object type if successful; otherwise H5I_BADID.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Iget_type(int obj_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Iget_type(obj_id);
+        }
+    }
+
+    public static long H5Iget_name(int obj_id, String[] name, long size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Iget_name(obj_id, name, size);
+        }
+    }
+
+    public static int H5Iget_ref(int obj_id) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Iget_ref(obj_id);
+        }
+    }
+
+    public static int H5Iinc_ref(int obj_id) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Iinc_ref(obj_id);
+        }
+    }
+
+    public static int H5Idec_ref(int obj_id) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Idec_ref(obj_id);
+        }
+    }
+
+    public static int H5Iget_file_id(int obj_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Iget_file_id(obj_id);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5S.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5S.java
new file mode 100644
index 0000000..8dc1e3c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5S.java
@@ -0,0 +1,505 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 dataspace functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ *
+ * @author Bernd Rinn
+ */
+public class H5S
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /**
+     * H5Screate creates a new dataspace of a particular type.
+     * 
+     * @param type The type of dataspace to be created.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Screate(int type) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Screate(type);
+        }
+    }
+
+    /**
+     * H5Screate_simple creates a new simple data space and opens it for access.
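+     * <p>
+     * A minimal sketch using the <code>long[]</code> overload below to create a 10x20 dataspace
+     * (values illustrative):
+     * 
+     * <pre>
+     * long[] dims = new long[] { 10, 20 };
+     * int spaceId = H5Screate_simple(2, dims, dims);
+     * </pre>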
+     * 
+     * @param rank Number of dimensions of dataspace.
+     * @param dims An array of the size of each dimension.
+     * @param maxdims An array of the maximum size of each dimension.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims or maxdims is null.
+     */
+    public static int H5Screate_simple(int rank, byte[] dims, byte[] maxdims)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Screate_simple(rank, dims, maxdims);
+        }
+    }
+
+    public static int H5Screate_simple(final int rank, final long[] dims, final long[] maxdims)
+            throws HDF5Exception, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Screate_simple(rank, dims, maxdims);
+        }
+    }
+
+    /**
+     * H5Scopy creates a new dataspace which is an exact copy of the dataspace identified by
+     * space_id.
+     * 
+     * @param space_id Identifier of dataspace to copy.
+     * @return a dataspace identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Scopy(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Scopy(space_id);
+        }
+    }
+
+    /**
+     * H5Sselect_elements selects array elements to be included in the selection for the space_id
+     * dataspace.
+     * 
+     * @param space_id Identifier of the dataspace.
+     * @param op operator specifying how the new selection is combined.
+     * @param num_elements Number of elements to be selected.
+     * @param coord A 2-dimensional array specifying the coordinates of the elements.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sselect_elements(int space_id, int op, int num_elements, byte[] coord)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sselect_elements(space_id, op, num_elements, coord);
+        }
+    }
+
+    /**
+     * H5Sselect_all selects the entire extent of the dataspace space_id.
+     * 
+     * @param space_id IN: The identifier of the dataspace to be selected.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sselect_all(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sselect_all(space_id);
+        }
+    }
+
+    /**
+     * H5Sselect_none resets the selection region for the dataspace space_id to include no elements.
+     * 
+     * @param space_id IN: The identifier of the dataspace to be reset.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sselect_none(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sselect_none(space_id);
+        }
+    }
+
+    /**
+     * H5Sselect_valid verifies that the selection of the dataspace is contained within its extent.
+     * 
+     * @param space_id The identifier of the dataspace to query.
+     * @return true if the selection is contained within the extent; FALSE if it is not or if an
+     *         error occurred.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static boolean H5Sselect_valid(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sselect_valid(space_id);
+        }
+    }
+
+    /**
+     * H5Sget_simple_extent_npoints determines the number of elements in a dataspace.
+     * 
+     * @param space_id ID of the dataspace object to query
+     * @return the number of elements in the dataspace if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Sget_simple_extent_npoints(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_simple_extent_npoints(space_id);
+        }
+    }
+
+    /**
+     * H5Sget_select_npoints determines the number of elements in the current selection of a
+     * dataspace.
+     * 
+     * @param space_id Dataspace identifier.
+     * @return the number of elements in the selection if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Sget_select_npoints(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_select_npoints(space_id);
+        }
+    }
+
+    /**
+     * H5Sget_simple_extent_ndims determines the dimensionality (or rank) of a dataspace.
+     * 
+     * @param space_id Identifier of the dataspace
+     * @return the number of dimensions in the dataspace if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sget_simple_extent_ndims(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_simple_extent_ndims(space_id);
+        }
+    }
+
+    /**
+     * H5Sget_simple_extent_dims returns the size and maximum sizes of each dimension of a dataspace
+     * through the dims and maxdims parameters.
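+     * <p>
+     * A minimal sketch (<code>spaceId</code> hypothetical):
+     * 
+     * <pre>
+     * int rank = H5Sget_simple_extent_ndims(spaceId);
+     * long[] dims = new long[rank];
+     * long[] maxdims = new long[rank];
+     * H5Sget_simple_extent_dims(spaceId, dims, maxdims);
+     * </pre>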
+     * 
+     * @param space_id IN: Identifier of the dataspace object to query
+     * @param dims OUT: Pointer to array to store the size of each dimension.
+     * @param maxdims OUT: Pointer to array to store the maximum size of each dimension.
+     * @return the number of dimensions in the dataspace if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - dims or maxdims is null.
+     */
+    public static int H5Sget_simple_extent_dims(int space_id, long[] dims, long[] maxdims)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_simple_extent_dims(space_id, dims, maxdims);
+        }
+    }
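+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): the rank is
+    // queried first so that the dims/maxdims buffers can be sized correctly for the call above.
+    private static long[] exampleGetDims(int spaceId) throws HDF5LibraryException
+    {
+        final int rank = H5Sget_simple_extent_ndims(spaceId);
+        final long[] dims = new long[rank];
+        final long[] maxdims = new long[rank];
+        H5Sget_simple_extent_dims(spaceId, dims, maxdims);
+        return dims;
+    }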
+
+    /**
+     * H5Sget_simple_extent_type queries a dataspace to determine the current class of a dataspace.
+     * 
+     * @param space_id Dataspace identifier.
+     * @return a dataspace class identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sget_simple_extent_type(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_simple_extent_type(space_id);
+        }
+    }
+
+    /**
+     * H5Sset_extent_simple sets or resets the size of an existing dataspace.
+     * 
+     * @param space_id Dataspace identifier.
+     * @param rank Rank, or dimensionality, of the dataspace.
+     * @param current_size Array containing current size of dataspace.
+     * @param maximum_size Array containing maximum size of dataspace.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - current_size or maximum_size is null.
+     */
+    public static int H5Sset_extent_simple(int space_id, int rank, byte[] current_size,
+            byte[] maximum_size) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sset_extent_simple(space_id, rank, current_size, maximum_size);
+        }
+    }
+
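+    /**
+     * H5Sset_extent_simple sets or resets the size of an existing dataspace, taking the sizes as
+     * <code>long[]</code> arrays.
+     * 
+     * @param space_id Dataspace identifier.
+     * @param rank Rank, or dimensionality, of the dataspace.
+     * @param currentSize Array containing current size of dataspace.
+     * @param maxSize Array containing maximum size of dataspace.
+     * @return a non-negative value if successful
+     * @exception HDF5Exception - Error from the HDF-5 Library.
+     */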
+    public static int H5Sset_extent_simple(final int space_id, final int rank,
+            final long[] currentSize, final long[] maxSize) throws HDF5Exception,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sset_extent_simple(space_id, rank, currentSize, maxSize);
+        }
+    }
+
+    /**
+     * H5Sis_simple determines whether a dataspace is a simple dataspace.
+     * 
+     * @param space_id Identifier of the dataspace to query
+     * @return true if it is a simple dataspace
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static boolean H5Sis_simple(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sis_simple(space_id);
+        }
+    }
+
+    /**
+     * H5Soffset_simple sets the offset of a simple dataspace space_id.
+     * 
+     * @param space_id IN: The identifier for the dataspace object to reset.
+     * @param offset IN: The offset at which to position the selection.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - offset array is null.
+     */
+    public static int H5Soffset_simple(int space_id, byte[] offset) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Soffset_simple(space_id, offset);
+        }
+    }
+
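+    /**
+     * H5Soffset_simple sets the offset of a simple dataspace space_id, taking the offset as a
+     * <code>long[]</code> array.
+     * 
+     * @param space_id IN: The identifier for the dataspace object to reset.
+     * @param offset IN: The offset at which to position the selection.
+     * @return a non-negative value if successful
+     * @exception HDF5Exception - Error from the HDF-5 Library.
+     * @exception NullPointerException - offset array is null.
+     */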
+    public static int H5Soffset_simple(final int space_id, final long[] offset)
+            throws HDF5Exception, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Soffset_simple(space_id, offset);
+        }
+    }
+
+    /**
+     * H5Sextent_copy copies the extent from source_space_id to dest_space_id. This action may
+     * change the type of the dataspace.
+     * 
+     * @param dest_space_id IN: The identifier for the dataspace to which the extent is copied.
+     * @param source_space_id IN: The identifier for the dataspace from which the extent is copied.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sextent_copy(int dest_space_id, int source_space_id)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sextent_copy(dest_space_id, source_space_id);
+        }
+    }
+
+    /**
+     * H5Sset_extent_none removes the extent from a dataspace and sets the type to H5S_NONE.
+     * 
+     * @param space_id The identifier for the dataspace from which the extent is to be removed.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sset_extent_none(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sset_extent_none(space_id);
+        }
+    }
+
+    /**
+     * H5Sselect_hyperslab selects a hyperslab region to add to the current selected region for the
+     * dataspace specified by space_id. The start, stride, count, and block arrays must be the same
+     * size as the rank of the dataspace.
+     * 
+     * @param space_id IN: Identifier of dataspace selection to modify
+     * @param op IN: Operation to perform on current selection.
+     * @param start IN: Offset of start of hyperslab
+     * @param stride IN: Hyperslab stride.
+     * @param count IN: Number of blocks included in hyperslab.
+     * @param block IN: Size of block in hyperslab.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */
+    public static int H5Sselect_hyperslab(int space_id, int op, byte[] start, byte[] stride,
+            byte[] count, byte[] block) throws HDF5LibraryException, NullPointerException,
+            IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sselect_hyperslab(space_id, op, start, stride, count, block);
+        }
+    }
+
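+    /**
+     * H5Sselect_hyperslab selects a hyperslab region to add to the current selected region for the
+     * dataspace specified by space_id, taking the coordinates as <code>long[]</code> arrays.
+     * 
+     * @param space_id IN: Identifier of dataspace selection to modify
+     * @param op IN: Operation to perform on current selection.
+     * @param start IN: Offset of start of hyperslab
+     * @param stride IN: Hyperslab stride.
+     * @param count IN: Number of blocks included in hyperslab.
+     * @param block IN: Size of block in hyperslab.
+     * @return a non-negative value if successful
+     * @exception HDF5Exception - Error from the HDF-5 Library.
+     * @exception NullPointerException - an input array is null.
+     * @exception IllegalArgumentException - an input array is invalid.
+     */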
+    public static int H5Sselect_hyperslab(final int space_id, final int op, final long[] start,
+            final long[] stride, final long[] count, final long[] block) throws HDF5Exception,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sselect_hyperslab(space_id, op, start, stride, count, block);
+        }
+    }
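+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): selects a
+    // 10x20 hyperslab at the origin of a 2D dataspace and checks the number of selected
+    // elements. HDF5Constants.H5S_SELECT_SET is assumed to be exposed by the HDF5Constants
+    // class of this package.
+    private static void exampleSelectHyperslab(int spaceId) throws HDF5Exception
+    {
+        final long[] start = { 0, 0 };
+        final long[] stride = { 1, 1 };
+        final long[] count = { 10, 20 };
+        final long[] block = { 1, 1 };
+        H5Sselect_hyperslab(spaceId, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+        assert H5Sget_select_npoints(spaceId) == 10 * 20;
+    }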
+
+    /**
+     * H5Sclose releases a dataspace.
+     * 
+     * @param space_id Identifier of dataspace to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Sclose(int space_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sclose(space_id);
+        }
+    }
+
+    /**
+     * H5Sget_select_hyper_nblocks returns the number of hyperslab blocks in the current dataspace
+     * selection.
+     * 
+     * @param spaceid Identifier of dataspace to query.
+     * @return the number of hyperslab blocks in the current selection if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Sget_select_hyper_nblocks(int spaceid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_select_hyper_nblocks(spaceid);
+        }
+    }
+
+    /**
+     * H5Sget_select_elem_npoints returns the number of element points in the current dataspace
+     * selection.
+     * 
+     * @param spaceid Identifier of dataspace to query.
+     * @return the number of element points in the current selection if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Sget_select_elem_npoints(int spaceid) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_select_elem_npoints(spaceid);
+        }
+    }
+
+    /**
+     * H5Sget_select_hyper_blocklist returns an array of hyperslab blocks. The block coordinates
+     * have the same dimensionality (rank) as the dataspace they are located within. The list of
+     * blocks is formatted as follows:
+     * 
+     * <pre>
+     * 
+     * <"start" coordinate>, immediately followed by <"opposite" corner
+     * coordinate>, followed by the next "start" and "opposite" coordinates,
+     * etc. until all of the selected blocks have been listed.
+     * 
+     * </pre>
+     * 
+     * @param spaceid Identifier of dataspace to query.
+     * @param startblock first block to retrieve
+     * @param numblocks number of blocks to retrieve
+     * @param buf returns blocks startblock to startblock+numblocks-1, each block is <i>rank</i> *
+     *            2 (corners) longs.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static int H5Sget_select_hyper_blocklist(int spaceid, long startblock, long numblocks,
+            long[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_select_hyper_blocklist(spaceid, startblock, numblocks, buf);
+        }
+    }
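+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): the block
+    // list buffer needs rank * 2 longs per block, so it is sized from the block count and the
+    // dataspace rank before the call above.
+    private static long[] exampleGetBlockList(int spaceId) throws HDF5LibraryException
+    {
+        final long nblocks = H5Sget_select_hyper_nblocks(spaceId);
+        final int rank = H5Sget_simple_extent_ndims(spaceId);
+        final long[] buf = new long[(int) nblocks * rank * 2];
+        H5Sget_select_hyper_blocklist(spaceId, 0, nblocks, buf);
+        return buf;
+    }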
+
+    /**
+     * H5Sget_select_elem_pointlist returns an array of element points in the current dataspace
+     * selection. The point coordinates have the same dimensionality (rank) as the dataspace they
+     * are located within, one coordinate per point.
+     * 
+     * @param spaceid Identifier of dataspace to query.
+     * @param startpoint first point to retrieve
+     * @param numpoints number of points to retrieve
+     * @param buf returns points startpoint to startpoint+numpoints-1, each point is <i>rank</i>
+     *            longs.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - buf is null.
+     */
+    public static int H5Sget_select_elem_pointlist(int spaceid, long startpoint, long numpoints,
+            long[] buf) throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_select_elem_pointlist(spaceid, startpoint, numpoints, buf);
+        }
+    }
+
+    /**
+     * H5Sget_select_bounds retrieves the coordinates of the bounding box containing the current
+     * selection and places them into user-supplied buffers.
+     * <P>
+     * The start and end buffers must be large enough to hold the dataspace rank number of
+     * coordinates.
+     * 
+     * @param spaceid Identifier of dataspace to query.
+     * @param start coordinates of lowest corner of bounding box.
+     * @param end coordinates of highest corner of bounding box.
+     * @return a non-negative value if successful, with start and end initialized.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - start or end is null.
+     */
+    public static int H5Sget_select_bounds(int spaceid, long[] start, long[] end)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Sget_select_bounds(spaceid, start, end);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5T.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5T.java
new file mode 100644
index 0000000..7abc313
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/H5T.java
@@ -0,0 +1,1055 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * Low-level interface for HDF5 datatype functions.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ * 
+ * @author Bernd Rinn
+ */
+public class H5T
+{
+    static
+    {
+        H5.ensureNativeLibIsLoaded();
+    }
+
+    /**
+     * H5Topen opens a named datatype at the location specified by loc_id and returns an identifier
+     * for the datatype.
+     * 
+     * @param loc_id A file, group, or datatype identifier.
+     * @param name A datatype name.
+     * @param access_plist_id Datatype access property list identifier.
+     * @return a named datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Topen(int loc_id, String name, int access_plist_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Topen(loc_id, name, access_plist_id);
+        }
+    }
+
+    /**
+     * H5Tcommit commits a transient datatype (not immutable) to a file, turning it into a named
+     * datatype.
+     * 
+     * @param loc_id A file or group identifier.
+     * @param name A datatype name.
+     * @param type_id A datatype identifier.
+     * @param link_create_plist_id Link creation property list.
+     * @param dtype_create_plist_id Datatype creation property list.
+     * @param dtype_access_plist_id Datatype access property list.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tcommit(int loc_id, String name, int type_id, int link_create_plist_id,
+            int dtype_create_plist_id, int dtype_access_plist_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tcommit(loc_id, name, type_id, link_create_plist_id, dtype_create_plist_id,
+                    dtype_access_plist_id);
+        }
+    }
+
+    /**
+     * H5Tcommitted queries a type to determine whether the type specified by the type identifier is
+     * a named type or a transient type.
+     * 
+     * @param type Datatype identifier.
+     * @return true if the datatype is a named (committed) type
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static boolean H5Tcommitted(int type) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tcommitted(type);
+        }
+    }
+
+    /**
+     * H5Tcreate creates a new datatype of the specified class with the specified number of bytes.
+     * 
+     * @param dclass Class of datatype to create.
+     * @param size The number of bytes in the datatype to create.
+     * @return datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tcreate(int dclass, int size) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tcreate(dclass, size);
+        }
+    }
+
+    /**
+     * H5Tcopy copies an existing datatype. The returned type is always transient and unlocked.
+     * 
+     * @param type_id Identifier of datatype to copy. Can be a datatype identifier, a predefined
+     *            datatype (defined in H5Tpublic.h), or a dataset Identifier.
+     * @return a datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tcopy(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tcopy(type_id);
+        }
+    }
+
+    /**
+     * H5Tequal determines whether two datatype identifiers refer to the same datatype.
+     * 
+     * @param type_id1 Identifier of datatype to compare.
+     * @param type_id2 Identifier of datatype to compare.
+     * @return true if the datatype identifiers refer to the same datatype, false otherwise.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static boolean H5Tequal(int type_id1, int type_id2) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tequal(type_id1, type_id2);
+        }
+    }
+
+    /**
+     * H5Tlock locks the datatype specified by the type_id identifier, making it read-only and
+     * non-destructible.
+     * 
+     * @param type_id Identifier of datatype to lock.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tlock(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tlock(type_id);
+        }
+    }
+
+    /**
+     * H5Tget_class returns the datatype class identifier.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return datatype class identifier if successful; otherwise H5T_NO_CLASS (-1).
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_class(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_class(type_id);
+        }
+    }
+
+    /**
+     * H5Tget_size returns the size of a datatype in bytes as an int value.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the size of the datatype in bytes if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library, or if the size of the data
+     *                type exceeds an int
+     */
+    public static int H5Tget_size(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_size(type_id);
+        }
+    }
+
+    /**
+     * H5Tget_size returns the size of a datatype in bytes as a long value.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the size of the datatype in bytes if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Tget_size_long(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_size_long(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_size sets the total size in bytes, size, for an atomic datatype (this operation is not
+     * permitted on compound datatypes).
+     * 
+     * @param type_id Identifier of datatype to change size.
+     * @param size Size in bytes to modify datatype.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_size(int type_id, int size) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_size(type_id, size);
+        }
+    }
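+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): a fixed-length
+    // string type is typically created by copying the predefined C string type and setting its
+    // size. HDF5Constants.H5T_C_S1 is assumed to be exposed by the HDF5Constants class of this
+    // package.
+    private static int exampleFixedLengthStringType(int length) throws HDF5LibraryException
+    {
+        final int typeId = H5Tcopy(HDF5Constants.H5T_C_S1);
+        H5Tset_size(typeId, length);
+        return typeId;
+    }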
+
+    /**
+     * H5Tget_order returns the byte order of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a byte order constant if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_order(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_order(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_order sets the byte ordering of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param order Byte ordering constant.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_order(int type_id, int order) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_order(type_id, order);
+        }
+    }
+
+    /**
+     * H5Tget_precision returns the precision of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the number of significant bits if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_precision(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_precision(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_precision sets the precision of an atomic datatype.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param precision Number of bits of precision for datatype.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_precision(int type_id, int precision) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_precision(type_id, precision);
+        }
+    }
+
+    /**
+     * H5Tget_offset retrieves the bit offset of the first significant bit.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a positive offset value if successful; otherwise 0.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_offset(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_offset(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_offset sets the bit offset of the first significant bit.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param offset Offset of first significant bit.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_offset(int type_id, int offset) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_offset(type_id, offset);
+        }
+    }
+
+    /**
+     * H5Tget_pad retrieves the padding type of the least and most-significant bit padding.
+     * 
+     * @param type_id IN: Identifier of datatype to query.
+     * @param pad OUT: locations to return least-significant and most-significant bit padding type.
+     * 
+     *            <pre>
+     * 
+     *            pad[0] = lsb // least-significant bit padding type
+     *            pad[1] = msb // most-significant bit padding type
+     * 
+     * </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - pad is null.
+     */
+    public static int H5Tget_pad(int type_id, int[] pad) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_pad(type_id, pad);
+        }
+    }
+
+    /**
+     * H5Tset_pad sets the least and most-significant bits padding types.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param lsb Padding type for least-significant bits.
+     * @param msb Padding type for most-significant bits.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_pad(int type_id, int lsb, int msb) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_pad(type_id, lsb, msb);
+        }
+    }
+
+    /**
+     * H5Tget_sign retrieves the sign type for an integer type.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid sign type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_sign(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_sign(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_sign sets the sign property for an integer type.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param sign Sign type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_sign(int type_id, int sign) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_sign(type_id, sign);
+        }
+    }
+
+    /**
+     * H5Tget_fields retrieves information about the locations of the various bit fields of a
+     * floating point datatype.
+     * 
+     * @param type_id IN: Identifier of datatype to query.
+     * @param fields OUT: location of size and bit-position.
+     * 
+     *            <pre>
+     * 
+     *            fields[0] = spos  OUT: location to return sign bit-position.
+     *            fields[1] = epos  OUT: location to return exponent bit-position.
+     *            fields[2] = esize OUT: location to return size of exponent in bits.
+     *            fields[3] = mpos  OUT: location to return mantissa bit-position.
+     *            fields[4] = msize OUT: location to return size of mantissa in bits.
+     * 
+     * </pre>
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - fields is null.
+     * @exception IllegalArgumentException - fields array is invalid.
+     */
+    public static int H5Tget_fields(int type_id, int[] fields) throws HDF5LibraryException,
+            NullPointerException, IllegalArgumentException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_fields(type_id, fields);
+        }
+    }
+
+    /**
+     * H5Tset_fields sets the locations and sizes of the various floating point bit fields.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param spos Sign bit position.
+     * @param epos Exponent bit position.
+     * @param esize Size of exponent in bits.
+     * @param mpos Mantissa bit position.
+     * @param msize Size of mantissa in bits.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_fields(int type_id, int spos, int epos, int esize, int mpos, int msize)
+            throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_fields(type_id, spos, epos, esize, mpos, msize);
+        }
+    }
+
+    /**
+     * H5Tget_ebias retrieves the exponent bias of a floating-point type.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return the bias if successful; otherwise 0.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_ebias(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_ebias(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_ebias sets the exponent bias of a floating-point type.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param ebias Exponent bias value.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_ebias(int type_id, int ebias) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_ebias(type_id, ebias);
+        }
+    }
+
+    /**
+     * H5Tget_norm retrieves the mantissa normalization of a floating-point datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid normalization type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_norm(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_norm(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_norm sets the mantissa normalization of a floating-point datatype.
+     * 
+     * @param type_id Identifier of datatype to set.
+     * @param norm Mantissa normalization type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_norm(int type_id, int norm) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_norm(type_id, norm);
+        }
+    }
+
+    /**
+     * H5Tget_inpad retrieves the internal padding type for unused bits in floating-point datatypes.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid padding type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_inpad(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_inpad(type_id);
+        }
+    }
+
+    /**
+     * If any internal bits of a floating-point type are unused (that is, those significant bits
+     * which are not part of the sign, exponent, or mantissa), then H5Tset_inpad causes them to be
+     * filled according to the padding type specified by inpad.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @param inpad Padding type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_inpad(int type_id, int inpad) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_inpad(type_id, inpad);
+        }
+    }
+
+    /**
+     * H5Tget_cset retrieves the character set type of a string datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid character set type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_cset(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_cset(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_cset sets the character set to be used.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @param cset Character set type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_cset(int type_id, int cset) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_cset(type_id, cset);
+        }
+    }
+
+    /**
+     * H5Tget_strpad retrieves the string padding method for a string datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return a valid string padding type if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_strpad(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_strpad(type_id);
+        }
+    }
+
+    /**
+     * H5Tset_strpad defines the storage mechanism for the string.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @param strpad String padding type.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_strpad(int type_id, int strpad) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_strpad(type_id, strpad);
+        }
+    }
+
+    /**
+     * H5Tget_nmembers retrieves the number of fields a compound datatype has.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @return number of members datatype has if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_nmembers(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_nmembers(type_id);
+        }
+    }
+
+    /**
+     * H5Tget_member_name retrieves the name of a field of a compound datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_idx Field index (0-based) of the field name to retrieve.
+     * @return a valid String if successful; otherwise null.
+     */
+    public static String H5Tget_member_name(int type_id, int field_idx)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_member_name(type_id, field_idx);
+        }
+    }
+
+    /**
+     * H5Tget_member_index retrieves the index of a field of a compound datatype.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_name Field name of the field index to retrieve.
+     * @return if field is defined, the index; else negative.
+     */
+    public static int H5Tget_member_index(int type_id, String field_name)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_member_index(type_id, field_name);
+        }
+    }
+
+    /**
+     * H5Tget_member_class returns the datatype class of the specified member.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_idx Field index (0-based) of the field type to retrieve.
+     * @return the class of the datatype of the field if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_member_class(int type_id, int field_idx) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_member_class(type_id, field_idx);
+        }
+    }
+
+    /**
+     * H5Tget_member_type returns the datatype of the specified member.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param field_idx Field index (0-based) of the field type to retrieve.
+     * @return the identifier of a copy of the datatype of the field if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_member_type(int type_id, int field_idx) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_member_type(type_id, field_idx);
+        }
+    }
+
+    /**
+     * H5Tget_member_offset returns the byte offset of the specified member of the compound
+     * datatype. This is the byte offset in the HDF-5 file/library, NOT the offset of any Java
+     * object which might be mapped to this data item.
+     * 
+     * @param type_id Identifier of datatype to query.
+     * @param membno Field index (0-based) of the field type to retrieve.
+     * @return the offset of the member.
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static long H5Tget_member_offset(int type_id, int membno) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_member_offset(type_id, membno);
+        }
+    }
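+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): iterates over
+    // the members of a compound type, printing each member name and its byte offset in the file
+    // layout.
+    private static void exampleDumpCompound(int typeId) throws HDF5LibraryException
+    {
+        final int nmembers = H5Tget_nmembers(typeId);
+        for (int i = 0; i < nmembers; ++i)
+        {
+            System.out.println(H5Tget_member_name(typeId, i) + " @ "
+                    + H5Tget_member_offset(typeId, i));
+        }
+    }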
+
+    /**
+     * H5Tinsert adds another member to the compound datatype type_id.
+     * 
+     * @param type_id Identifier of compound datatype to modify.
+     * @param name Name of the field to insert.
+     * @param offset Offset in memory structure of the field to insert.
+     * @param field_id Datatype identifier of the field to insert.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tinsert(int type_id, String name, long offset, int field_id)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tinsert(type_id, name, offset, field_id);
+        }
+    }
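+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): builds a
+    // compound type with an int member followed by a double member. H5T_COMPOUND,
+    // H5T_NATIVE_INT32 and H5T_NATIVE_DOUBLE are assumed to be exposed by the HDF5Constants
+    // class of this package.
+    private static int exampleCompoundType() throws HDF5LibraryException
+    {
+        final int typeId = H5Tcreate(HDF5Constants.H5T_COMPOUND, 4 + 8);
+        H5Tinsert(typeId, "index", 0, HDF5Constants.H5T_NATIVE_INT32);
+        H5Tinsert(typeId, "value", 4, HDF5Constants.H5T_NATIVE_DOUBLE);
+        return typeId;
+    }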
+
+    /**
+     * H5Tpack recursively removes padding from within a compound datatype to make it more efficient
+     * (space-wise) to store that data.
+     * <P>
+     * <b>WARNING:</b> This call only affects the C-data, even if it succeeds, there may be no
+     * visible effect on Java objects.
+     * 
+     * @param type_id Identifier of datatype to modify.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tpack(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tpack(type_id);
+        }
+    }
+
+    /**
+     * H5Tclose releases a datatype.
+     * 
+     * @param type_id Identifier of datatype to release.
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tclose(int type_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tclose(type_id);
+        }
+    }
+
+    /**
+     * H5Tenum_create creates a new enumeration datatype based on the specified base datatype,
+     * base_id, which must be an integer type.
+     * 
+     * @param base_id Identifier of the base (parent) integer datatype.
+     * @return the datatype identifier of the new enumeration datatype if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tenum_create(int base_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tenum_create(base_id);
+        }
+    }
+
+    /**
+     * H5Tenum_insert inserts a new enumeration datatype member into an 8-bit enumeration datatype.
+     * 
+     * @param type Identifier of datatype.
+     * @param name The name of the member
+     * @param value The value of the member, data of the correct type
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tenum_insert(int type, String name, byte value)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tenum_insert(type, name, value);
+        }
+    }
+
+    /**
+     * H5Tenum_insert inserts a new enumeration datatype member into a 16-bit enumeration datatype.
+     * 
+     * @param type Identifier of datatype.
+     * @param name The name of the member
+     * @param value The value of the member, data of the correct type
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tenum_insert(int type, String name, short value)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tenum_insert(type, name, value);
+        }
+    }
+
+    /**
+     * H5Tenum_insert inserts a new enumeration datatype member into a 32-bit enumeration datatype.
+     * 
+     * @param type Identifier of datatype.
+     * @param name The name of the member
+     * @param value The value of the member, data of the correct type
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tenum_insert(int type, String name, int value) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tenum_insert(type, name, value);
+        }
+    }
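+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): creates a
+    // 32-bit enumeration type with two members. HDF5Constants.H5T_NATIVE_INT32 is assumed to be
+    // exposed by the HDF5Constants class of this package.
+    private static int exampleEnumType() throws HDF5LibraryException
+    {
+        final int typeId = H5Tenum_create(HDF5Constants.H5T_NATIVE_INT32);
+        H5Tenum_insert(typeId, "RED", 0);
+        H5Tenum_insert(typeId, "GREEN", 1);
+        return typeId;
+    }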
+
+    /**
+     * Converts the <var>value</var> (in place) to little endian.
+     * 
+     * @return a non-negative value if successful
+     */
+    public static int H5Tconvert_to_little_endian(short[] value)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tconvert_to_little_endian(value);
+        }
+    }
+
+    /**
+     * Converts the <var>value</var> (in place) to little endian.
+     * 
+     * @return a non-negative value if successful
+     */
+    public static int H5Tconvert_to_little_endian(int[] value)
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tconvert_to_little_endian(value);
+        }
+    }
+
+    /**
+     * H5Tenum_nameof finds the symbol name that corresponds to the specified value of the
+     * enumeration datatype type.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @param value IN: The value of the member, data of the correct type
+     * @param name OUT: The name of the member
+     * @param size IN: The max length of the name
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tenum_nameof(int type, int[] value, String[] name, int size)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tenum_nameof(type, value, name, size);
+        }
+    }
+
+    /**
+     * H5Tenum_valueof finds the value that corresponds to the specified name of the enumeration
+     * datatype type.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @param name IN: The name of the member
+     * @param value OUT: The value of the member
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - name is null.
+     */
+    public static int H5Tenum_valueof(int type, String name, int[] value)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tenum_valueof(type, name, value);
+        }
+    }
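+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): resolves an
+    // enumeration value back to its symbolic name; 64 is an arbitrary maximum name length.
+    private static String exampleEnumName(int typeId, int value) throws HDF5LibraryException
+    {
+        final String[] name = new String[1];
+        H5Tenum_nameof(typeId, new int[] { value }, name, 64);
+        return name[0];
+    }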
+
+    /**
+     * H5Tvlen_create creates a new variable-length (VL) datatype.
+     * 
+     * @param base_id IN: Identifier of parent datatype.
+     * @return the datatype identifier of the new VL datatype if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tvlen_create(int base_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tvlen_create(base_id);
+        }
+    }
+
+    /**
+     * H5Tset_tag tags an opaque datatype type_id with a unique ASCII identifier tag.
+     * 
+     * @param type IN: Identifier of the opaque datatype to tag.
+     * @param tag IN: Name of the tag (will be stored as ASCII)
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tset_tag(int type, String tag) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tset_tag(type, tag);
+        }
+    }
+
+    /**
+     * H5Tget_tag returns the tag associated with datatype type_id.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @return the tag
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static String H5Tget_tag(int type) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_tag(type);
+        }
+    }
+
+    /**
+     * H5Tget_super returns the base datatype from which the datatype type is derived.
+     * 
+     * @param type IN: Identifier of datatype.
+     * @return the parent type
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */
+    public static int H5Tget_super(int type) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_super(type);
+        }
+    }
+
+    /**
+     * H5Tget_member_value returns the value of the enumeration datatype member memb_no.
+     * 
+     * @param type_id IN: Identifier of datatype.
+     * @param membno IN: The index (0-based) of the member
+     * @param value OUT: The value of the member
+     * @return a non-negative value if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     * @exception NullPointerException - value is null.
+     */
+    public static int H5Tget_member_value(int type_id, int membno, int[] value)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_member_value(type_id, membno, value);
+        }
+    }
+
+    /**
+     * Creates an array datatype object.
+     * 
+     * @param base_type_id Datatype identifier for the array base datatype.
+     * @param rank Rank of the array.
+     * @param dims Size of each array dimension.
+     * @return a valid datatype identifier if successful; otherwise returns a negative value.
+     * @exception HDF5LibraryException Error from the HDF5 Library.
+     * @exception NullPointerException rank is < 1 or dims is null.
+     */
+    public static int H5Tarray_create(int base_type_id, int rank, int[] dims)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tarray_create(base_type_id, rank, dims);
+        }
+    }
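+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): a 3x4 array
+    // datatype over a float base type; the rank argument must match dims.length.
+    // HDF5Constants.H5T_NATIVE_FLOAT is assumed to be exposed by the HDF5Constants class of
+    // this package.
+    private static int exampleArrayType() throws HDF5LibraryException
+    {
+        final int[] dims = { 3, 4 };
+        return H5Tarray_create(HDF5Constants.H5T_NATIVE_FLOAT, dims.length, dims);
+    }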
+
+    /**
+     * Returns the rank of an array datatype.
+     * 
+     * @param adtype_id Datatype identifier of array object.
+     * @return the rank of the array if successful; otherwise returns a negative value.
+     * @exception HDF5LibraryException Error from the HDF5 Library.
+     */
+    public static int H5Tget_array_ndims(int adtype_id) throws HDF5LibraryException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_array_ndims(adtype_id);
+        }
+    }
+
+    /**
+     * Returns sizes of array dimensions.
+     * 
+     * @param adtype_id IN: Datatype identifier of array object.
+     * @param dims OUT: Sizes of array dimensions.
+     * @return the non-negative number of dimensions of the array type if successful; otherwise
+     *         returns a negative value.
+     * @exception HDF5LibraryException Error from the HDF5 Library.
+     * @exception NullPointerException dims is null.
+     */
+    public static int H5Tget_array_dims(int adtype_id, int[] dims) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_array_dims(adtype_id, dims);
+        }
+    }
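+
+    // Minimal usage sketch (illustrative only, not part of the upstream sources): the dims
+    // buffer is sized from the array rank before the call above.
+    private static int[] exampleArrayDims(int adtypeId) throws HDF5LibraryException
+    {
+        final int[] dims = new int[H5Tget_array_ndims(adtypeId)];
+        H5Tget_array_dims(adtypeId, dims);
+        return dims;
+    }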
+
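+    /**
+     * H5Tget_native_type returns the equivalent native datatype for the datatype specified by
+     * tid.
+     * 
+     * @param tid Identifier of the datatype to query.
+     * @param alloc_time Direction of search (see the H5Tget_native_type documentation of the C
+     *            library).
+     * @return a native datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */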
+    public static int H5Tget_native_type(int tid, int alloc_time) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_native_type(tid, alloc_time);
+        }
+    }
+
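+    /**
+     * H5Tget_native_type returns the equivalent native datatype for the datatype specified by
+     * tid, using the default search direction.
+     * 
+     * @param tid Identifier of the datatype to query.
+     * @return a native datatype identifier if successful
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */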
+    public static int H5Tget_native_type(final int tid) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tget_native_type(tid);
+        }
+    }
+
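+    /**
+     * H5Tis_variable_str determines whether the datatype dtype_id is a variable-length string.
+     * 
+     * @param dtype_id Identifier of the datatype to query.
+     * @return true if dtype_id is a variable-length string type
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */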
+    public static boolean H5Tis_variable_str(int dtype_id) throws HDF5LibraryException,
+            NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tis_variable_str(dtype_id);
+        }
+    }
+
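+    /**
+     * H5Tdetect_class determines whether the datatype dtype_id contains any datatype members of
+     * the datatype class dtype_class.
+     * 
+     * @param dtype_id Identifier of the datatype to query.
+     * @param dtype_class The datatype class to look for.
+     * @return true if dtype_id contains a member of class dtype_class
+     * @exception HDF5LibraryException - Error from the HDF-5 Library.
+     */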
+    public static boolean H5Tdetect_class(int dtype_id, int dtype_class)
+            throws HDF5LibraryException, NullPointerException
+    {
+        synchronized (ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.H5Tdetect_class(dtype_id, dtype_class);
+        }
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDF5Constants.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDF5Constants.java
new file mode 100755
index 0000000..9a43c6c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDF5Constants.java
@@ -0,0 +1,1941 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+/**
+ * This class contains the C constants and enumerated types of the HDF5 library. The values of
+ * these constants are obtained from the library by calling J2C(int jconstant), where jconstant is
+ * any of the private constants whose names start with "JH5" and which need to be converted.
+ * <P>
+ * <b>Do not edit this file!</b> <b>See also:</b> ncsa.hdf.hdf5lib.HDF5Library
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ */
+public class HDF5Constants
+{
+    // /////////////////////////////////////////////////////////////////////////
+    // This list must be identical to H5Constants.h //
+    // DO NOT EDIT THE LIST !!! //
+    // /////////////////////////////////////////////////////////////////////////
+
+    final private static int JH5_SZIP_MAX_PIXELS_PER_BLOCK = 1000;
+
+    final private static int JH5_SZIP_NN_OPTION_MASK = 1010;
+
+    final private static int JH5_SZIP_EC_OPTION_MASK = 1020;
+
+    final private static int JH5_SZIP_ALLOW_K13_OPTION_MASK = 1021;
+
+    final private static int JH5_SZIP_CHIP_OPTION_MASK = 1022;
+
+    final private static int JH5D_ALLOC_TIME_DEFAULT = 1030;
+
+    final private static int JH5D_ALLOC_TIME_EARLY = 1040;
+
+    final private static int JH5D_ALLOC_TIME_ERROR = 1050;
+
+    final private static int JH5D_ALLOC_TIME_INCR = 1060;
+
+    final private static int JH5D_ALLOC_TIME_LATE = 1070;
+
+    final private static int JH5D_CHUNKED = 1080;
+
+    final private static int JH5D_COMPACT = 1090;
+
+    final private static int JH5D_CONTIGUOUS = 1100;
+
+    final private static int JH5D_FILL_TIME_ALLOC = 1110;
+
+    final private static int JH5D_FILL_TIME_ERROR = 1120;
+
+    final private static int JH5D_FILL_TIME_NEVER = 1130;
+
+    final private static int JH5D_FILL_VALUE_DEFAULT = 1140;
+
+    final private static int JH5D_FILL_VALUE_ERROR = 1150;
+
+    final private static int JH5D_FILL_VALUE_UNDEFINED = 1160;
+
+    final private static int JH5D_FILL_VALUE_USER_DEFINED = 1170;
+
+    final private static int JH5D_LAYOUT_ERROR = 1180;
+
+    final private static int JH5D_NLAYOUTS = 1190;
+
+    final private static int JH5D_SPACE_STATUS_ALLOCATED = 1200;
+
+    final private static int JH5D_SPACE_STATUS_ERROR = 1210;
+
+    final private static int JH5D_SPACE_STATUS_NOT_ALLOCATED = 1220;
+
+    final private static int JH5D_SPACE_STATUS_PART_ALLOCATED = 1230;
+
+    final private static int JH5E_ALIGNMENT = 1240;
+
+    final private static int JH5E_ALREADYEXISTS = 1250;
+
+    final private static int JH5E_ALREADYINIT = 1260;
+
+    final private static int JH5E_ARGS = 1270;
+
+    final private static int JH5E_ATOM = 1280;
+
+    final private static int JH5E_ATTR = 1290;
+
+    final private static int JH5E_BADATOM = 1300;
+
+    final private static int JH5E_BADFILE = 1310;
+
+    final private static int JH5E_BADGROUP = 1320;
+
+    final private static int JH5E_BADMESG = 1330;
+
+    final private static int JH5E_BADRANGE = 1340;
+
+    final private static int JH5E_BADSELECT = 1350;
+
+    final private static int JH5E_BADSIZE = 1360;
+
+    final private static int JH5E_BADTYPE = 1370;
+
+    final private static int JH5E_BADVALUE = 1380;
+
+    final private static int JH5E_BTREE = 1390;
+
+    final private static int JH5E_CACHE = 1400;
+
+    final private static int JH5E_CALLBACK = 1410;
+
+    final private static int JH5E_CANAPPLY = 1420;
+
+    final private static int JH5E_CANTCLIP = 1450;
+
+    final private static int JH5E_CANTCLOSEFILE = 1460;
+
+    final private static int JH5E_CANTCONVERT = 1470;
+
+    final private static int JH5E_CANTCOPY = 1480;
+
+    final private static int JH5E_CANTCOUNT = 1490;
+
+    final private static int JH5E_CANTCREATE = 1500;
+
+    final private static int JH5E_CANTDEC = 1510;
+
+    final private static int JH5E_CANTDECODE = 1520;
+
+    final private static int JH5E_CANTDELETE = 1530;
+
+    final private static int JH5E_CANTENCODE = 1540;
+
+    final private static int JH5E_CANTFLUSH = 1550;
+
+    final private static int JH5E_CANTFREE = 1560;
+
+    final private static int JH5E_CANTGET = 1570;
+
+    final private static int JH5E_CANTINC = 1580;
+
+    final private static int JH5E_CANTINIT = 1590;
+
+    final private static int JH5E_CANTINSERT = 1600;
+
+    final private static int JH5E_CANTLIST = 1610;
+
+    final private static int JH5E_CANTLOAD = 1620;
+
+    final private static int JH5E_CANTLOCK = 1630;
+
+    final private static int JH5E_CANTNEXT = 1650;
+
+    final private static int JH5E_CANTOPENFILE = 1660;
+
+    final private static int JH5E_CANTOPENOBJ = 1670;
+
+    final private static int JH5E_CANTREGISTER = 1690;
+
+    final private static int JH5E_CANTRELEASE = 1700;
+
+    final private static int JH5E_CANTSELECT = 1710;
+
+    final private static int JH5E_CANTSET = 1730;
+
+    final private static int JH5E_CANTSPLIT = 1740;
+
+    final private static int JH5E_CANTUNLOCK = 1750;
+
+    final private static int JH5E_CLOSEERROR = 1760;
+
+    final private static int JH5E_COMPLEN = 1770;
+
+    final private static int JH5E_DATASET = 1790;
+
+    final private static int JH5E_DATASPACE = 1800;
+
+    final private static int JH5E_DATATYPE = 1810;
+
+    final private static int JH5E_DUPCLASS = 1820;
+
+    final private static int JH5E_EFL = 1830;
+
+    final private static int JH5E_EXISTS = 1840;
+
+    final private static int JH5E_FCNTL = 1850;
+
+    final private static int JH5E_FILE = 1860;
+
+    final private static int JH5E_FILEEXISTS = 1870;
+
+    final private static int JH5E_FILEOPEN = 1880;
+
+    final private static int JH5E_FUNC = 1900;
+
+    final private static int JH5E_HEAP = 1910;
+
+    final private static int JH5E_INTERNAL = 1920;
+
+    final private static int JH5E_IO = 1930;
+
+    final private static int JH5E_LINK = 1940;
+
+    final private static int JH5E_LINKCOUNT = 1950;
+
+    final private static int JH5E_MOUNT = 1960;
+
+    final private static int JH5E_MPI = 1970;
+
+    final private static int JH5E_MPIERRSTR = 1980;
+
+    final private static int JH5E_NOFILTER = 1990;
+
+    final private static int JH5E_NOIDS = 2000;
+
+    final private static int JH5E_NONE_MAJOR = 2010;
+
+    final private static int JH5E_NONE_MINOR = 2020;
+
+    final private static int JH5E_NOSPACE = 2030;
+
+    final private static int JH5E_NOTCACHED = 2040;
+
+    final private static int JH5E_NOTFOUND = 2050;
+
+    final private static int JH5E_NOTHDF5 = 2060;
+
+    final private static int JH5E_OHDR = 2070;
+
+    final private static int JH5E_OVERFLOW = 2080;
+
+    final private static int JH5E_PLINE = 2090;
+
+    final private static int JH5E_PLIST = 2100;
+
+    final private static int JH5E_PROTECT = 2110;
+
+    final private static int JH5E_READERROR = 2120;
+
+    final private static int JH5E_REFERENCE = 2130;
+
+    final private static int JH5E_RESOURCE = 2140;
+
+    final private static int JH5E_RS = 2150;
+
+    final private static int JH5E_SEEKERROR = 2160;
+
+    final private static int JH5E_SETLOCAL = 2170;
+
+    final private static int JH5E_STORAGE = 2190;
+
+    final private static int JH5E_SYM = 2200;
+
+    final private static int JH5E_TRUNCATED = 2220;
+
+    final private static int JH5E_TST = 2230;
+
+    final private static int JH5E_UNINITIALIZED = 2240;
+
+    final private static int JH5E_UNSUPPORTED = 2250;
+
+    final private static int JH5E_VERSION = 2260;
+
+    final private static int JH5E_VFL = 2270;
+
+    final private static int JH5E_WALK_DOWNWARD = 2280;
+
+    final private static int JH5E_WALK_UPWARD = 2290;
+
+    final private static int JH5E_WRITEERROR = 2300;
+
+    final private static int JH5F_ACC_CREAT = 2310;
+
+    final private static int JH5F_ACC_DEBUG = 2320;
+
+    final private static int JH5F_ACC_EXCL = 2330;
+
+    final private static int JH5F_ACC_RDONLY = 2340;
+
+    final private static int JH5F_ACC_RDWR = 2350;
+
+    final private static int JH5F_ACC_TRUNC = 2360;
+
+    final private static int JH5F_CLOSE_DEFAULT = 2370;
+
+    final private static int JH5F_CLOSE_SEMI = 2380;
+
+    final private static int JH5F_CLOSE_STRONG = 2390;
+
+    final private static int JH5F_CLOSE_WEAK = 2400;
+
+    final private static int JH5F_OBJ_ALL = 2410;
+
+    final private static int JH5F_OBJ_ATTR = 2415;
+
+    final private static int JH5F_OBJ_DATASET = 2420;
+
+    final private static int JH5F_OBJ_DATATYPE = 2430;
+
+    final private static int JH5F_OBJ_FILE = 2440;
+
+    final private static int JH5F_OBJ_GROUP = 2450;
+
+    final private static int JH5F_SCOPE_GLOBAL = 2470;
+
+    final private static int JH5F_SCOPE_LOCAL = 2480;
+
+    final private static int JH5F_UNLIMITED = 2490;
+
+    final private static int JH5F_LIBVER_EARLIEST = 2494;
+
+    final private static int JH5F_LIBVER_LATEST = 2495;
+
+    final private static int JH5G_DATASET = 2500;
+
+    final private static int JH5G_GROUP = 2510;
+
+    final private static int JH5G_LINK = 2520;
+
+    final private static int JH5G_LINK_ERROR = 2530;
+
+    final private static int JH5G_LINK_HARD = 2540;
+
+    final private static int JH5G_LINK_SOFT = 2550;
+
+    final private static int JH5G_NLIBTYPES = 2560;
+
+    final private static int JH5G_NTYPES = 2570;
+
+    final private static int JH5G_NUSERTYPES = 2580;
+
+    final private static int JH5G_RESERVED_5 = 2600;
+
+    final private static int JH5G_RESERVED_6 = 2610;
+
+    final private static int JH5G_RESERVED_7 = 2620;
+
+    final private static int JH5G_SAME_LOC = 2630;
+
+    final private static int JH5G_TYPE = 2640;
+
+    final private static int JH5G_UNKNOWN = 2650;
+
+    final private static int JH5I_ATTR = 2670;
+
+    final private static int JH5I_BADID = 2680;
+
+    final private static int JH5I_DATASET = 2690;
+
+    final private static int JH5I_DATASPACE = 2700;
+
+    final private static int JH5I_DATATYPE = 2710;
+
+    final private static int JH5I_FILE = 2720;
+
+    final private static int JH5I_GENPROP_CLS = 2740;
+
+    final private static int JH5I_GENPROP_LST = 2750;
+
+    final private static int JH5I_GROUP = 2760;
+
+    final private static int JH5I_INVALID_HID = 2770;
+
+    final private static int JH5I_REFERENCE = 2790;
+
+    final private static int JH5I_VFL = 2810;
+
+    final private static int JH5O_TYPE_UNKNOWN = 5510;
+
+    final private static int JH5O_TYPE_GROUP = 5520;
+
+    final private static int JH5O_TYPE_DATASET = 5530;
+
+    final private static int JH5O_TYPE_NAMED_DATATYPE = 5540;
+
+    final private static int JH5O_TYPE_NTYPES = 5550;
+
+    final private static int JH5L_TYPE_ERROR = 5560;
+
+    final private static int JH5L_TYPE_HARD = 5570;
+
+    final private static int JH5L_TYPE_SOFT = 5580;
+
+    final private static int JH5L_TYPE_EXTERNAL = 5590;
+
+    final private static int JH5L_TYPE_MAX = 5600;
+
+    final private static int JH5P_DATASET_CREATE = 2820;
+
+    final private static int JH5P_DATASET_CREATE_DEFAULT = 2830;
+
+    final private static int JH5P_DATASET_XFER = 2840;
+
+    final private static int JH5P_DATASET_XFER_DEFAULT = 2850;
+
+    final private static int JH5P_DEFAULT = 2860;
+
+    final private static int JH5P_FILE_ACCESS = 2870;
+
+    final private static int JH5P_FILE_ACCESS_DEFAULT = 2880;
+
+    final private static int JH5P_FILE_CREATE = 2890;
+
+    final private static int JH5P_FILE_CREATE_DEFAULT = 2900;
+
+    final private static int JH5P_NO_CLASS = 2930;
+
+    final private static int JH5P_ROOT = 6000;
+    
+    final private static int JH5P_OBJECT_CREATE = 6010;
+    
+    final private static int JH5P_DATASET_ACCESS = 6020;
+    
+    final private static int JH5P_DATASET_ACCESS_DEFAULT = 6030;
+    
+    final private static int JH5P_FILE_MOUNT = 6040;
+    
+    final private static int JH5P_FILE_MOUNT_DEFAULT = 6050;
+    
+    final private static int JH5P_GROUP_CREATE = 6060;
+    
+    final private static int JH5P_GROUP_CREATE_DEFAULT = 6070;
+    
+    final private static int JH5P_GROUP_ACCESS = 6080;
+    
+    final private static int JH5P_GROUP_ACCESS_DEFAULT = 6090;
+    
+    final private static int JH5P_DATATYPE_CREATE = 6100;
+    
+    final private static int JH5P_DATATYPE_CREATE_DEFAULT = 6110;
+    
+    final private static int JH5P_DATATYPE_ACCESS = 6120;
+    
+    final private static int JH5P_DATATYPE_ACCESS_DEFAULT = 6130;
+    
+    final private static int JH5P_STRING_CREATE = 6140;
+    
+    final private static int JH5P_ATTRIBUTE_CREATE = 6150;
+    
+    final private static int JH5P_ATTRIBUTE_CREATE_DEFAULT = 6160;
+    
+    final private static int JH5P_OBJECT_COPY = 6170;
+    
+    final private static int JH5P_OBJECT_COPY_DEFAULT = 6180;
+    
+    final private static int JH5P_LINK_CREATE = 6190;
+    
+    final private static int JH5P_LINK_CREATE_DEFAULT = 6200;
+    
+    final private static int JH5P_LINK_ACCESS = 6210;
+    
+    final private static int JH5P_LINK_ACCESS_DEFAULT = 6220;
+    
+    final private static int JH5R_BADTYPE = 2950;
+
+    final private static int JH5R_DATASET_REGION = 2960;
+
+    final private static int JH5R_MAXTYPE = 2980;
+
+    final private static int JH5R_OBJ_REF_BUF_SIZE = 2990;
+
+    final private static int JH5R_OBJECT = 3000;
+
+    final private static int JH5S_ALL = 3010;
+
+    final private static int JH5S_MAX_RANK = 3030;
+
+    final private static int JH5S_NO_CLASS = 3040;
+
+    final private static int JH5S_SCALAR = 3050;
+
+    final private static int JH5S_SEL_ALL = 3060;
+
+    final private static int JH5S_SEL_ERROR = 3070;
+
+    final private static int JH5S_SEL_HYPERSLABS = 3080;
+
+    final private static int JH5S_SEL_N = 3090;
+
+    final private static int JH5S_SEL_NONE = 3100;
+
+    final private static int JH5S_SEL_POINTS = 3110;
+
+    final private static int JH5S_SELECT_AND = 3120;
+
+    final private static int JH5S_SELECT_APPEND = 3130;
+
+    final private static int JH5S_SELECT_INVALID = 3140;
+
+    final private static int JH5S_SELECT_NOOP = 3150;
+
+    final private static int JH5S_SELECT_NOTA = 3160;
+
+    final private static int JH5S_SELECT_NOTB = 3170;
+
+    final private static int JH5S_SELECT_OR = 3180;
+
+    final private static int JH5S_SELECT_PREPEND = 3190;
+
+    final private static int JH5S_SELECT_SET = 3200;
+
+    final private static int JH5S_SELECT_XOR = 3210;
+
+    final private static int JH5S_SIMPLE = 3220;
+
+    final private static int JH5S_UNLIMITED = 3230;
+
+    final private static int JH5T_ALPHA_B16 = 3240;
+
+    final private static int JH5T_ALPHA_B32 = 3250;
+
+    final private static int JH5T_ALPHA_B64 = 3260;
+
+    final private static int JH5T_ALPHA_B8 = 3270;
+
+    final private static int JH5T_ALPHA_F32 = 3280;
+
+    final private static int JH5T_ALPHA_F64 = 3290;
+
+    final private static int JH5T_ALPHA_I16 = 3300;
+
+    final private static int JH5T_ALPHA_I32 = 3310;
+
+    final private static int JH5T_ALPHA_I64 = 3320;
+
+    final private static int JH5T_ALPHA_I8 = 3330;
+
+    final private static int JH5T_ALPHA_U16 = 3340;
+
+    final private static int JH5T_ALPHA_U32 = 3350;
+
+    final private static int JH5T_ALPHA_U64 = 3360;
+
+    final private static int JH5T_ALPHA_U8 = 3370;
+
+    final private static int JH5T_ARRAY = 3380;
+
+    final private static int JH5T_BITFIELD = 3390;
+
+    final private static int JH5T_BKG_NO = 3400;
+
+    final private static int JH5T_BKG_YES = 3410;
+
+    final private static int JH5T_C_S1 = 3420;
+
+    final private static int JH5T_COMPOUND = 3430;
+
+    final private static int JH5T_CONV_CONV = 3440;
+
+    final private static int JH5T_CONV_FREE = 3450;
+
+    final private static int JH5T_CONV_INIT = 3460;
+
+    final private static int JH5T_CSET_ASCII = 3470;
+
+    final private static int JH5T_CSET_ERROR = 3480;
+    
+    final private static int JH5T_CSET_UTF8 = 3490;
+
+    final private static int JH5T_CSET_RESERVED_10 = 3500;
+
+    final private static int JH5T_CSET_RESERVED_11 = 3510;
+
+    final private static int JH5T_CSET_RESERVED_12 = 3520;
+
+    final private static int JH5T_CSET_RESERVED_13 = 3530;
+
+    final private static int JH5T_CSET_RESERVED_14 = 3540;
+
+    final private static int JH5T_CSET_RESERVED_15 = 3550;
+
+    final private static int JH5T_CSET_RESERVED_2 = 3560;
+
+    final private static int JH5T_CSET_RESERVED_3 = 3570;
+
+    final private static int JH5T_CSET_RESERVED_4 = 3580;
+
+    final private static int JH5T_CSET_RESERVED_5 = 3590;
+
+    final private static int JH5T_CSET_RESERVED_6 = 3600;
+
+    final private static int JH5T_CSET_RESERVED_7 = 3610;
+
+    final private static int JH5T_CSET_RESERVED_8 = 3620;
+
+    final private static int JH5T_CSET_RESERVED_9 = 3630;
+
+    final private static int JH5T_DIR_ASCEND = 3640;
+
+    final private static int JH5T_DIR_DEFAULT = 3650;
+
+    final private static int JH5T_DIR_DESCEND = 3660;
+
+    final private static int JH5T_ENUM = 3670;
+
+    final private static int JH5T_FLOAT = 3680;
+
+    final private static int JH5T_FORTRAN_S1 = 3690;
+
+    final private static int JH5T_IEEE_F32BE = 3700;
+
+    final private static int JH5T_IEEE_F32LE = 3710;
+
+    final private static int JH5T_IEEE_F64BE = 3720;
+
+    final private static int JH5T_IEEE_F64LE = 3730;
+
+    final private static int JH5T_INTEGER = 3740;
+
+    final private static int JH5T_INTEL_B16 = 3750;
+
+    final private static int JH5T_INTEL_B32 = 3760;
+
+    final private static int JH5T_INTEL_B64 = 3770;
+
+    final private static int JH5T_INTEL_B8 = 3780;
+
+    final private static int JH5T_INTEL_F32 = 3790;
+
+    final private static int JH5T_INTEL_F64 = 3800;
+
+    final private static int JH5T_INTEL_I16 = 3810;
+
+    final private static int JH5T_INTEL_I32 = 3820;
+
+    final private static int JH5T_INTEL_I64 = 3830;
+
+    final private static int JH5T_INTEL_I8 = 3840;
+
+    final private static int JH5T_INTEL_U16 = 3850;
+
+    final private static int JH5T_INTEL_U32 = 3860;
+
+    final private static int JH5T_INTEL_U64 = 3870;
+
+    final private static int JH5T_INTEL_U8 = 3880;
+
+    final private static int JH5T_MIPS_B16 = 3890;
+
+    final private static int JH5T_MIPS_B32 = 3900;
+
+    final private static int JH5T_MIPS_B64 = 3910;
+
+    final private static int JH5T_MIPS_B8 = 3920;
+
+    final private static int JH5T_MIPS_F32 = 3930;
+
+    final private static int JH5T_MIPS_F64 = 3940;
+
+    final private static int JH5T_MIPS_I16 = 3950;
+
+    final private static int JH5T_MIPS_I32 = 3960;
+
+    final private static int JH5T_MIPS_I64 = 3970;
+
+    final private static int JH5T_MIPS_I8 = 3980;
+
+    final private static int JH5T_MIPS_U16 = 3990;
+
+    final private static int JH5T_MIPS_U32 = 4000;
+
+    final private static int JH5T_MIPS_U64 = 4010;
+
+    final private static int JH5T_MIPS_U8 = 4020;
+
+    final private static int JH5T_NATIVE_B16 = 4030;
+
+    final private static int JH5T_NATIVE_B32 = 4040;
+
+    final private static int JH5T_NATIVE_B64 = 4050;
+
+    final private static int JH5T_NATIVE_B8 = 4060;
+
+    final private static int JH5T_NATIVE_CHAR = 4070;
+
+    final private static int JH5T_NATIVE_DOUBLE = 4080;
+
+    final private static int JH5T_NATIVE_FLOAT = 4090;
+
+    final private static int JH5T_NATIVE_HADDR = 4100;
+
+    final private static int JH5T_NATIVE_HBOOL = 4110;
+
+    final private static int JH5T_NATIVE_HERR = 4120;
+
+    final private static int JH5T_NATIVE_HSIZE = 4130;
+
+    final private static int JH5T_NATIVE_HSSIZE = 4140;
+
+    final private static int JH5T_NATIVE_INT = 4150;
+
+    final private static int JH5T_NATIVE_INT_FAST16 = 4160;
+
+    final private static int JH5T_NATIVE_INT_FAST32 = 4170;
+
+    final private static int JH5T_NATIVE_INT_FAST64 = 4180;
+
+    final private static int JH5T_NATIVE_INT_FAST8 = 4190;
+
+    final private static int JH5T_NATIVE_INT_LEAST16 = 4200;
+
+    final private static int JH5T_NATIVE_INT_LEAST32 = 4210;
+
+    final private static int JH5T_NATIVE_INT_LEAST64 = 4220;
+
+    final private static int JH5T_NATIVE_INT_LEAST8 = 4230;
+
+    final private static int JH5T_NATIVE_INT16 = 4240;
+
+    final private static int JH5T_NATIVE_INT32 = 4250;
+
+    final private static int JH5T_NATIVE_INT64 = 4260;
+
+    final private static int JH5T_NATIVE_INT8 = 4270;
+
+    final private static int JH5T_NATIVE_LDOUBLE = 4280;
+
+    final private static int JH5T_NATIVE_LLONG = 4290;
+
+    final private static int JH5T_NATIVE_LONG = 4300;
+
+    final private static int JH5T_NATIVE_OPAQUE = 4310;
+
+    final private static int JH5T_NATIVE_SCHAR = 4320;
+
+    final private static int JH5T_NATIVE_SHORT = 4330;
+
+    final private static int JH5T_NATIVE_UCHAR = 4340;
+
+    final private static int JH5T_NATIVE_UINT = 4350;
+
+    final private static int JH5T_NATIVE_UINT_FAST16 = 4360;
+
+    final private static int JH5T_NATIVE_UINT_FAST32 = 4370;
+
+    final private static int JH5T_NATIVE_UINT_FAST64 = 4380;
+
+    final private static int JH5T_NATIVE_UINT_FAST8 = 4390;
+
+    final private static int JH5T_NATIVE_UINT_LEAST16 = 4400;
+
+    final private static int JH5T_NATIVE_UINT_LEAST32 = 4410;
+
+    final private static int JH5T_NATIVE_UINT_LEAST64 = 4420;
+
+    final private static int JH5T_NATIVE_UINT_LEAST8 = 4430;
+
+    final private static int JH5T_NATIVE_UINT16 = 4440;
+
+    final private static int JH5T_NATIVE_UINT32 = 4450;
+
+    final private static int JH5T_NATIVE_UINT64 = 4460;
+
+    final private static int JH5T_NATIVE_UINT8 = 4470;
+
+    final private static int JH5T_NATIVE_ULLONG = 4480;
+
+    final private static int JH5T_NATIVE_ULONG = 4490;
+
+    final private static int JH5T_NATIVE_USHORT = 4500;
+
+    final private static int JH5T_NCLASSES = 4510;
+
+    final private static int JH5T_NO_CLASS = 4520;
+
+    final private static int JH5T_NORM_ERROR = 4530;
+
+    final private static int JH5T_NORM_IMPLIED = 4540;
+
+    final private static int JH5T_NORM_MSBSET = 4550;
+
+    final private static int JH5T_NORM_NONE = 4560;
+
+    final private static int JH5T_NPAD = 4570;
+
+    final private static int JH5T_NSGN = 4580;
+
+    final private static int JH5T_OPAQUE = 4590;
+
+    final private static int JH5T_OPAQUE_TAG_MAX = 4595; /* 1.6.5 */
+
+    final private static int JH5T_ORDER_BE = 4600;
+
+    final private static int JH5T_ORDER_ERROR = 4610;
+
+    final private static int JH5T_ORDER_LE = 4620;
+
+    final private static int JH5T_ORDER_NONE = 4630;
+
+    final private static int JH5T_ORDER_VAX = 4640;
+
+    final private static int JH5T_PAD_BACKGROUND = 4650;
+
+    final private static int JH5T_PAD_ERROR = 4660;
+
+    final private static int JH5T_PAD_ONE = 4670;
+
+    final private static int JH5T_PAD_ZERO = 4680;
+
+    final private static int JH5T_PERS_DONTCARE = 4690;
+
+    final private static int JH5T_PERS_HARD = 4700;
+
+    final private static int JH5T_PERS_SOFT = 4710;
+
+    final private static int JH5T_REFERENCE = 4720;
+
+    final private static int JH5T_SGN_2 = 4730;
+
+    final private static int JH5T_SGN_ERROR = 4740;
+
+    final private static int JH5T_SGN_NONE = 4750;
+
+    final private static int JH5T_STD_B16BE = 4760;
+
+    final private static int JH5T_STD_B16LE = 4770;
+
+    final private static int JH5T_STD_B32BE = 4780;
+
+    final private static int JH5T_STD_B32LE = 4790;
+
+    final private static int JH5T_STD_B64BE = 4800;
+
+    final private static int JH5T_STD_B64LE = 4810;
+
+    final private static int JH5T_STD_B8BE = 4820;
+
+    final private static int JH5T_STD_B8LE = 4830;
+
+    final private static int JH5T_STD_I16BE = 4840;
+
+    final private static int JH5T_STD_I16LE = 4850;
+
+    final private static int JH5T_STD_I32BE = 4860;
+
+    final private static int JH5T_STD_I32LE = 4870;
+
+    final private static int JH5T_STD_I64BE = 4880;
+
+    final private static int JH5T_STD_I64LE = 4890;
+
+    final private static int JH5T_STD_I8BE = 4900;
+
+    final private static int JH5T_STD_I8LE = 4910;
+
+    final private static int JH5T_STD_REF_DSETREG = 4920;
+
+    final private static int JH5T_STD_REF_OBJ = 4930;
+
+    final private static int JH5T_STD_U16BE = 4940;
+
+    final private static int JH5T_STD_U16LE = 4950;
+
+    final private static int JH5T_STD_U32BE = 4960;
+
+    final private static int JH5T_STD_U32LE = 4970;
+
+    final private static int JH5T_STD_U64BE = 4980;
+
+    final private static int JH5T_STD_U64LE = 4990;
+
+    final private static int JH5T_STD_U8BE = 5000;
+
+    final private static int JH5T_STD_U8LE = 5010;
+
+    final private static int JH5T_STR_ERROR = 5020;
+
+    final private static int JH5T_STR_NULLPAD = 5030;
+
+    final private static int JH5T_STR_NULLTERM = 5040;
+
+    final private static int JH5T_STR_RESERVED_10 = 5050;
+
+    final private static int JH5T_STR_RESERVED_11 = 5060;
+
+    final private static int JH5T_STR_RESERVED_12 = 5070;
+
+    final private static int JH5T_STR_RESERVED_13 = 5080;
+
+    final private static int JH5T_STR_RESERVED_14 = 5090;
+
+    final private static int JH5T_STR_RESERVED_15 = 5100;
+
+    final private static int JH5T_STR_RESERVED_3 = 5110;
+
+    final private static int JH5T_STR_RESERVED_4 = 5120;
+
+    final private static int JH5T_STR_RESERVED_5 = 5130;
+
+    final private static int JH5T_STR_RESERVED_6 = 5140;
+
+    final private static int JH5T_STR_RESERVED_7 = 5150;
+
+    final private static int JH5T_STR_RESERVED_8 = 5160;
+
+    final private static int JH5T_STR_RESERVED_9 = 5170;
+
+    final private static int JH5T_STR_SPACEPAD = 5180;
+
+    final private static int JH5T_STRING = 5190;
+
+    final private static int JH5T_TIME = 5200;
+
+    final private static int JH5T_UNIX_D32BE = 5210;
+
+    final private static int JH5T_UNIX_D32LE = 5220;
+
+    final private static int JH5T_UNIX_D64BE = 5230;
+
+    final private static int JH5T_UNIX_D64LE = 5240;
+
+    final private static int JH5T_VARIABLE = 5245;
+
+    final private static int JH5T_VLEN = 5250;
+
+    final private static int JH5Z_CB_CONT = 5260;
+
+    final private static int JH5Z_CB_ERROR = 5270;
+
+    final private static int JH5Z_CB_FAIL = 5280;
+
+    final private static int JH5Z_CB_NO = 5290;
+
+    final private static int JH5Z_DISABLE_EDC = 5300;
+
+    final private static int JH5Z_ENABLE_EDC = 5310;
+
+    final private static int JH5Z_ERROR_EDC = 5320;
+
+    final private static int JH5Z_FILTER_DEFLATE = 5330;
+
+    final private static int JH5Z_FILTER_ERROR = 5340;
+
+    final private static int JH5Z_FILTER_FLETCHER32 = 5350;
+
+    final private static int JH5Z_FILTER_MAX = 5360;
+
+    final private static int JH5Z_FILTER_NONE = 5370;
+
+    final private static int JH5Z_FILTER_RESERVED = 5380;
+
+    final private static int JH5Z_FILTER_SHUFFLE = 5390;
+
+    final private static int JH5Z_FILTER_SZIP = 5400;
+
+    final private static int JH5Z_FLAG_DEFMASK = 5410;
+
+    final private static int JH5Z_FLAG_INVMASK = 5420;
+
+    final private static int JH5Z_FLAG_MANDATORY = 5430;
+
+    final private static int JH5Z_FLAG_OPTIONAL = 5440;
+
+    final private static int JH5Z_FLAG_REVERSE = 5450;
+
+    final private static int JH5Z_FLAG_SKIP_EDC = 5460;
+
+    final private static int JH5Z_MAX_NFILTERS = 5470;
+
+    final private static int JH5Z_NO_EDC = 5480;
+    
+    final private static int JH5Z_SO_INT = 5481;
+
+    final private static int JH5Z_SO_FLOAT_DSCALE = 5482;
+
+    final private static int JH5Z_SO_FLOAT_ESCALE = 5483;
+
+    final private static int JH5Z_FILTER_CONFIG_ENCODE_ENABLED = 5490;
+
+    final private static int JH5Z_FILTER_CONFIG_DECODE_ENABLED = 5500;
+
+    // /////////////////////////////////////////////////////////////////////////
+    // Get the HDF5 constants from the library //
+    // /////////////////////////////////////////////////////////////////////////
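+
+    // Note: each public H5* constant below is resolved eagerly, at class-load time, by passing
+    // its private JH5* placeholder through the native J2C lookup in javaToC() at the end of
+    // this class, so the resolved values match the C library actually loaded at runtime.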
+
+    final public static int H5_SZIP_MAX_PIXELS_PER_BLOCK = javaToC(JH5_SZIP_MAX_PIXELS_PER_BLOCK);
+
+    final public static int H5_SZIP_NN_OPTION_MASK = javaToC(JH5_SZIP_NN_OPTION_MASK);
+
+    final public static int H5_SZIP_EC_OPTION_MASK = javaToC(JH5_SZIP_EC_OPTION_MASK);
+
+    final public static int H5_SZIP_ALLOW_K13_OPTION_MASK = javaToC(JH5_SZIP_ALLOW_K13_OPTION_MASK);
+
+    final public static int H5_SZIP_CHIP_OPTION_MASK = javaToC(JH5_SZIP_CHIP_OPTION_MASK);
+
+    final public static int H5D_ALLOC_TIME_DEFAULT = javaToC(JH5D_ALLOC_TIME_DEFAULT);
+
+    final public static int H5D_ALLOC_TIME_EARLY = javaToC(JH5D_ALLOC_TIME_EARLY);
+
+    final public static int H5D_ALLOC_TIME_ERROR = javaToC(JH5D_ALLOC_TIME_ERROR);
+
+    final public static int H5D_ALLOC_TIME_INCR = javaToC(JH5D_ALLOC_TIME_INCR);
+
+    final public static int H5D_ALLOC_TIME_LATE = javaToC(JH5D_ALLOC_TIME_LATE);
+
+    final public static int H5D_CHUNKED = javaToC(JH5D_CHUNKED);
+
+    final public static int H5D_COMPACT = javaToC(JH5D_COMPACT);
+
+    final public static int H5D_CONTIGUOUS = javaToC(JH5D_CONTIGUOUS);
+
+    final public static int H5D_FILL_TIME_ALLOC = javaToC(JH5D_FILL_TIME_ALLOC);
+
+    final public static int H5D_FILL_TIME_ERROR = javaToC(JH5D_FILL_TIME_ERROR);
+
+    final public static int H5D_FILL_TIME_NEVER = javaToC(JH5D_FILL_TIME_NEVER);
+
+    final public static int H5D_FILL_VALUE_DEFAULT = javaToC(JH5D_FILL_VALUE_DEFAULT);
+
+    final public static int H5D_FILL_VALUE_ERROR = javaToC(JH5D_FILL_VALUE_ERROR);
+
+    final public static int H5D_FILL_VALUE_UNDEFINED = javaToC(JH5D_FILL_VALUE_UNDEFINED);
+
+    final public static int H5D_FILL_VALUE_USER_DEFINED = javaToC(JH5D_FILL_VALUE_USER_DEFINED);
+
+    final public static int H5D_LAYOUT_ERROR = javaToC(JH5D_LAYOUT_ERROR);
+
+    final public static int H5D_NLAYOUTS = javaToC(JH5D_NLAYOUTS);
+
+    final public static int H5D_SPACE_STATUS_ALLOCATED = javaToC(JH5D_SPACE_STATUS_ALLOCATED);
+
+    final public static int H5D_SPACE_STATUS_ERROR = javaToC(JH5D_SPACE_STATUS_ERROR);
+
+    final public static int H5D_SPACE_STATUS_NOT_ALLOCATED =
+            javaToC(JH5D_SPACE_STATUS_NOT_ALLOCATED);
+
+    final public static int H5D_SPACE_STATUS_PART_ALLOCATED =
+            javaToC(JH5D_SPACE_STATUS_PART_ALLOCATED);
+
+    final public static int H5E_ALIGNMENT = javaToC(JH5E_ALIGNMENT);
+
+    final public static int H5E_ALREADYEXISTS = javaToC(JH5E_ALREADYEXISTS);
+
+    final public static int H5E_ALREADYINIT = javaToC(JH5E_ALREADYINIT);
+
+    final public static int H5E_ARGS = javaToC(JH5E_ARGS);
+
+    final public static int H5E_ATOM = javaToC(JH5E_ATOM);
+
+    final public static int H5E_ATTR = javaToC(JH5E_ATTR);
+
+    final public static int H5E_BADATOM = javaToC(JH5E_BADATOM);
+
+    final public static int H5E_BADFILE = javaToC(JH5E_BADFILE);
+
+    final public static int H5E_BADGROUP = javaToC(JH5E_BADGROUP);
+
+    final public static int H5E_BADMESG = javaToC(JH5E_BADMESG);
+
+    final public static int H5E_BADRANGE = javaToC(JH5E_BADRANGE);
+
+    final public static int H5E_BADSELECT = javaToC(JH5E_BADSELECT);
+
+    final public static int H5E_BADSIZE = javaToC(JH5E_BADSIZE);
+
+    final public static int H5E_BADTYPE = javaToC(JH5E_BADTYPE);
+
+    final public static int H5E_BADVALUE = javaToC(JH5E_BADVALUE);
+
+    final public static int H5E_BTREE = javaToC(JH5E_BTREE);
+
+    final public static int H5E_CACHE = javaToC(JH5E_CACHE);
+
+    final public static int H5E_CALLBACK = javaToC(JH5E_CALLBACK);
+
+    final public static int H5E_CANAPPLY = javaToC(JH5E_CANAPPLY);
+
+    final public static int H5E_CANTCLIP = javaToC(JH5E_CANTCLIP);
+
+    final public static int H5E_CANTCLOSEFILE = javaToC(JH5E_CANTCLOSEFILE);
+
+    final public static int H5E_CANTCONVERT = javaToC(JH5E_CANTCONVERT);
+
+    final public static int H5E_CANTCOPY = javaToC(JH5E_CANTCOPY);
+
+    final public static int H5E_CANTCOUNT = javaToC(JH5E_CANTCOUNT);
+
+    final public static int H5E_CANTCREATE = javaToC(JH5E_CANTCREATE);
+
+    final public static int H5E_CANTDEC = javaToC(JH5E_CANTDEC);
+
+    final public static int H5E_CANTDECODE = javaToC(JH5E_CANTDECODE);
+
+    final public static int H5E_CANTDELETE = javaToC(JH5E_CANTDELETE);
+
+    final public static int H5E_CANTENCODE = javaToC(JH5E_CANTENCODE);
+
+    final public static int H5E_CANTFLUSH = javaToC(JH5E_CANTFLUSH);
+
+    final public static int H5E_CANTFREE = javaToC(JH5E_CANTFREE);
+
+    final public static int H5E_CANTGET = javaToC(JH5E_CANTGET);
+
+    final public static int H5E_CANTINC = javaToC(JH5E_CANTINC);
+
+    final public static int H5E_CANTINIT = javaToC(JH5E_CANTINIT);
+
+    final public static int H5E_CANTINSERT = javaToC(JH5E_CANTINSERT);
+
+    final public static int H5E_CANTLIST = javaToC(JH5E_CANTLIST);
+
+    final public static int H5E_CANTLOAD = javaToC(JH5E_CANTLOAD);
+
+    final public static int H5E_CANTLOCK = javaToC(JH5E_CANTLOCK);
+
+    final public static int H5E_CANTNEXT = javaToC(JH5E_CANTNEXT);
+
+    final public static int H5E_CANTOPENFILE = javaToC(JH5E_CANTOPENFILE);
+
+    final public static int H5E_CANTOPENOBJ = javaToC(JH5E_CANTOPENOBJ);
+
+    final public static int H5E_CANTREGISTER = javaToC(JH5E_CANTREGISTER);
+
+    final public static int H5E_CANTRELEASE = javaToC(JH5E_CANTRELEASE);
+
+    final public static int H5E_CANTSELECT = javaToC(JH5E_CANTSELECT);
+
+    final public static int H5E_CANTSET = javaToC(JH5E_CANTSET);
+
+    final public static int H5E_CANTSPLIT = javaToC(JH5E_CANTSPLIT);
+
+    final public static int H5E_CANTUNLOCK = javaToC(JH5E_CANTUNLOCK);
+
+    final public static int H5E_CLOSEERROR = javaToC(JH5E_CLOSEERROR);
+
+    final public static int H5E_COMPLEN = javaToC(JH5E_COMPLEN);
+
+    final public static int H5E_DATASET = javaToC(JH5E_DATASET);
+
+    final public static int H5E_DATASPACE = javaToC(JH5E_DATASPACE);
+
+    final public static int H5E_DATATYPE = javaToC(JH5E_DATATYPE);
+
+    final public static int H5E_DUPCLASS = javaToC(JH5E_DUPCLASS);
+
+    final public static int H5E_EFL = javaToC(JH5E_EFL);
+
+    final public static int H5E_EXISTS = javaToC(JH5E_EXISTS);
+
+    final public static int H5E_FCNTL = javaToC(JH5E_FCNTL);
+
+    final public static int H5E_FILE = javaToC(JH5E_FILE);
+
+    final public static int H5E_FILEEXISTS = javaToC(JH5E_FILEEXISTS);
+
+    final public static int H5E_FILEOPEN = javaToC(JH5E_FILEOPEN);
+
+    final public static int H5E_FUNC = javaToC(JH5E_FUNC);
+
+    final public static int H5E_HEAP = javaToC(JH5E_HEAP);
+
+    final public static int H5E_INTERNAL = javaToC(JH5E_INTERNAL);
+
+    final public static int H5E_IO = javaToC(JH5E_IO);
+
+    final public static int H5E_LINK = javaToC(JH5E_LINK);
+
+    final public static int H5E_LINKCOUNT = javaToC(JH5E_LINKCOUNT);
+
+    final public static int H5E_MOUNT = javaToC(JH5E_MOUNT);
+
+    final public static int H5E_MPI = javaToC(JH5E_MPI);
+
+    final public static int H5E_MPIERRSTR = javaToC(JH5E_MPIERRSTR);
+
+    final public static int H5E_NOFILTER = javaToC(JH5E_NOFILTER);
+
+    final public static int H5E_NOIDS = javaToC(JH5E_NOIDS);
+
+    final public static int H5E_NONE_MAJOR = javaToC(JH5E_NONE_MAJOR);
+
+    final public static int H5E_NONE_MINOR = javaToC(JH5E_NONE_MINOR);
+
+    final public static int H5E_NOSPACE = javaToC(JH5E_NOSPACE);
+
+    final public static int H5E_NOTCACHED = javaToC(JH5E_NOTCACHED);
+
+    final public static int H5E_NOTFOUND = javaToC(JH5E_NOTFOUND);
+
+    final public static int H5E_NOTHDF5 = javaToC(JH5E_NOTHDF5);
+
+    final public static int H5E_OHDR = javaToC(JH5E_OHDR);
+
+    final public static int H5E_OVERFLOW = javaToC(JH5E_OVERFLOW);
+
+    final public static int H5E_PLINE = javaToC(JH5E_PLINE);
+
+    final public static int H5E_PLIST = javaToC(JH5E_PLIST);
+
+    final public static int H5E_PROTECT = javaToC(JH5E_PROTECT);
+
+    final public static int H5E_READERROR = javaToC(JH5E_READERROR);
+
+    final public static int H5E_REFERENCE = javaToC(JH5E_REFERENCE);
+
+    final public static int H5E_RESOURCE = javaToC(JH5E_RESOURCE);
+
+    final public static int H5E_RS = javaToC(JH5E_RS);
+
+    final public static int H5E_SEEKERROR = javaToC(JH5E_SEEKERROR);
+
+    final public static int H5E_SETLOCAL = javaToC(JH5E_SETLOCAL);
+
+    final public static int H5E_STORAGE = javaToC(JH5E_STORAGE);
+
+    final public static int H5E_SYM = javaToC(JH5E_SYM);
+
+    final public static int H5E_TRUNCATED = javaToC(JH5E_TRUNCATED);
+
+    final public static int H5E_TST = javaToC(JH5E_TST);
+
+    final public static int H5E_UNINITIALIZED = javaToC(JH5E_UNINITIALIZED);
+
+    final public static int H5E_UNSUPPORTED = javaToC(JH5E_UNSUPPORTED);
+
+    final public static int H5E_VERSION = javaToC(JH5E_VERSION);
+
+    final public static int H5E_VFL = javaToC(JH5E_VFL);
+
+    final public static int H5E_WALK_DOWNWARD = javaToC(JH5E_WALK_DOWNWARD);
+
+    final public static int H5E_WALK_UPWARD = javaToC(JH5E_WALK_UPWARD);
+
+    final public static int H5E_WRITEERROR = javaToC(JH5E_WRITEERROR);
+
+    final public static int H5F_ACC_CREAT = javaToC(JH5F_ACC_CREAT);
+
+    final public static int H5F_ACC_DEBUG = javaToC(JH5F_ACC_DEBUG);
+
+    final public static int H5F_ACC_EXCL = javaToC(JH5F_ACC_EXCL);
+
+    final public static int H5F_ACC_RDONLY = javaToC(JH5F_ACC_RDONLY);
+
+    final public static int H5F_ACC_RDWR = javaToC(JH5F_ACC_RDWR);
+
+    final public static int H5F_ACC_TRUNC = javaToC(JH5F_ACC_TRUNC);
+
+    final public static int H5F_CLOSE_DEFAULT = javaToC(JH5F_CLOSE_DEFAULT);
+
+    final public static int H5F_CLOSE_SEMI = javaToC(JH5F_CLOSE_SEMI);
+
+    final public static int H5F_CLOSE_STRONG = javaToC(JH5F_CLOSE_STRONG);
+
+    final public static int H5F_CLOSE_WEAK = javaToC(JH5F_CLOSE_WEAK);
+
+    final public static int H5F_OBJ_ALL = javaToC(JH5F_OBJ_ALL);
+
+    final public static int H5F_OBJ_ATTR = javaToC(JH5F_OBJ_ATTR);
+
+    final public static int H5F_OBJ_DATASET = javaToC(JH5F_OBJ_DATASET);
+
+    final public static int H5F_OBJ_DATATYPE = javaToC(JH5F_OBJ_DATATYPE);
+
+    final public static int H5F_OBJ_FILE = javaToC(JH5F_OBJ_FILE);
+
+    final public static int H5F_OBJ_GROUP = javaToC(JH5F_OBJ_GROUP);
+
+    final public static int H5F_SCOPE_GLOBAL = javaToC(JH5F_SCOPE_GLOBAL);
+
+    final public static int H5F_SCOPE_LOCAL = javaToC(JH5F_SCOPE_LOCAL);
+
+    final public static int H5F_UNLIMITED = javaToC(JH5F_UNLIMITED);
+
+    final public static int H5F_LIBVER_EARLIEST = javaToC(JH5F_LIBVER_EARLIEST);
+
+    final public static int H5F_LIBVER_LATEST = javaToC(JH5F_LIBVER_LATEST);
+
+    final public static int H5G_DATASET = javaToC(JH5G_DATASET);
+
+    final public static int H5G_GROUP = javaToC(JH5G_GROUP);
+
+    final public static int H5G_LINK = javaToC(JH5G_LINK);
+
+    final public static int H5G_LINK_ERROR = javaToC(JH5G_LINK_ERROR);
+
+    final public static int H5G_LINK_HARD = javaToC(JH5G_LINK_HARD);
+
+    final public static int H5G_LINK_SOFT = javaToC(JH5G_LINK_SOFT);
+
+    final public static int H5G_NLIBTYPES = javaToC(JH5G_NLIBTYPES);
+
+    final public static int H5G_NTYPES = javaToC(JH5G_NTYPES);
+
+    final public static int H5G_NUSERTYPES = javaToC(JH5G_NUSERTYPES);
+
+    final public static int H5G_RESERVED_5 = javaToC(JH5G_RESERVED_5);
+
+    final public static int H5G_RESERVED_6 = javaToC(JH5G_RESERVED_6);
+
+    final public static int H5G_RESERVED_7 = javaToC(JH5G_RESERVED_7);
+
+    final public static int H5G_SAME_LOC = javaToC(JH5G_SAME_LOC);
+
+    final public static int H5G_TYPE = javaToC(JH5G_TYPE);
+
+    final public static int H5G_UNKNOWN = javaToC(JH5G_UNKNOWN);
+
+    final public static int H5I_ATTR = javaToC(JH5I_ATTR);
+
+    final public static int H5I_BADID = javaToC(JH5I_BADID);
+
+    final public static int H5I_DATASET = javaToC(JH5I_DATASET);
+
+    final public static int H5I_DATASPACE = javaToC(JH5I_DATASPACE);
+
+    final public static int H5I_DATATYPE = javaToC(JH5I_DATATYPE);
+
+    final public static int H5I_FILE = javaToC(JH5I_FILE);
+
+    final public static int H5I_GENPROP_CLS = javaToC(JH5I_GENPROP_CLS);
+
+    final public static int H5I_GENPROP_LST = javaToC(JH5I_GENPROP_LST);
+
+    final public static int H5I_GROUP = javaToC(JH5I_GROUP);
+
+    final public static int H5I_INVALID_HID = javaToC(JH5I_INVALID_HID);
+
+    final public static int H5I_REFERENCE = javaToC(JH5I_REFERENCE);
+
+    final public static int H5I_VFL = javaToC(JH5I_VFL);
+
+    final public static int H5O_TYPE_UNKNOWN = javaToC(JH5O_TYPE_UNKNOWN);
+
+    final public static int H5O_TYPE_GROUP = javaToC(JH5O_TYPE_GROUP);
+
+    final public static int H5O_TYPE_DATASET = javaToC(JH5O_TYPE_DATASET);
+
+    final public static int H5O_TYPE_NAMED_DATATYPE = javaToC(JH5O_TYPE_NAMED_DATATYPE);
+
+    final public static int H5O_TYPE_NTYPES = javaToC(JH5O_TYPE_NTYPES);
+
+    final public static int H5L_TYPE_ERROR = javaToC(JH5L_TYPE_ERROR);
+
+    final public static int H5L_TYPE_HARD = javaToC(JH5L_TYPE_HARD);
+
+    final public static int H5L_TYPE_SOFT = javaToC(JH5L_TYPE_SOFT);
+
+    final public static int H5L_TYPE_EXTERNAL = javaToC(JH5L_TYPE_EXTERNAL);
+
+    final public static int H5L_TYPE_MAX = javaToC(JH5L_TYPE_MAX);
+
+    final public static int H5P_DATASET_CREATE = javaToC(JH5P_DATASET_CREATE);
+
+    final public static int H5P_DATASET_CREATE_DEFAULT = javaToC(JH5P_DATASET_CREATE_DEFAULT);
+
+    final public static int H5P_DATASET_XFER = javaToC(JH5P_DATASET_XFER);
+
+    final public static int H5P_DATASET_XFER_DEFAULT = javaToC(JH5P_DATASET_XFER_DEFAULT);
+
+    final public static int H5P_DEFAULT = javaToC(JH5P_DEFAULT);
+
+    final public static int H5P_FILE_ACCESS = javaToC(JH5P_FILE_ACCESS);
+
+    final public static int H5P_FILE_ACCESS_DEFAULT = javaToC(JH5P_FILE_ACCESS_DEFAULT);
+
+    final public static int H5P_FILE_CREATE = javaToC(JH5P_FILE_CREATE);
+
+    final public static int H5P_FILE_CREATE_DEFAULT = javaToC(JH5P_FILE_CREATE_DEFAULT);
+
+    final public static int H5P_NO_CLASS = javaToC(JH5P_NO_CLASS);
+    
+    final public static int H5P_ROOT = javaToC(JH5P_ROOT);
+
+    final public static int H5P_OBJECT_CREATE = javaToC(JH5P_OBJECT_CREATE);
+
+    final public static int H5P_DATASET_ACCESS = javaToC(JH5P_DATASET_ACCESS);
+
+    final public static int H5P_DATASET_ACCESS_DEFAULT = javaToC(JH5P_DATASET_ACCESS_DEFAULT);
+
+    final public static int H5P_FILE_MOUNT = javaToC(JH5P_FILE_MOUNT);
+
+    final public static int H5P_FILE_MOUNT_DEFAULT = javaToC(JH5P_FILE_MOUNT_DEFAULT);
+
+    final public static int H5P_GROUP_CREATE = javaToC(JH5P_GROUP_CREATE);
+
+    final public static int H5P_GROUP_CREATE_DEFAULT = javaToC(JH5P_GROUP_CREATE_DEFAULT);
+
+    final public static int H5P_GROUP_ACCESS = javaToC(JH5P_GROUP_ACCESS);
+
+    final public static int H5P_GROUP_ACCESS_DEFAULT = javaToC(JH5P_GROUP_ACCESS_DEFAULT);
+
+    final public static int H5P_DATATYPE_CREATE = javaToC(JH5P_DATATYPE_CREATE);
+
+    final public static int H5P_DATATYPE_CREATE_DEFAULT = javaToC(JH5P_DATATYPE_CREATE_DEFAULT);
+
+    final public static int H5P_DATATYPE_ACCESS = javaToC(JH5P_DATATYPE_ACCESS);
+
+    final public static int H5P_DATATYPE_ACCESS_DEFAULT = javaToC(JH5P_DATATYPE_ACCESS_DEFAULT);
+
+    final public static int H5P_STRING_CREATE = javaToC(JH5P_STRING_CREATE);
+
+    final public static int H5P_ATTRIBUTE_CREATE = javaToC(JH5P_ATTRIBUTE_CREATE);
+
+    final public static int H5P_ATTRIBUTE_CREATE_DEFAULT = javaToC(JH5P_ATTRIBUTE_CREATE_DEFAULT);
+
+    final public static int H5P_OBJECT_COPY = javaToC(JH5P_OBJECT_COPY);
+
+    final public static int H5P_OBJECT_COPY_DEFAULT = javaToC(JH5P_OBJECT_COPY_DEFAULT);
+
+    final public static int H5P_LINK_CREATE = javaToC(JH5P_LINK_CREATE);
+
+    final public static int H5P_LINK_CREATE_DEFAULT = javaToC(JH5P_LINK_CREATE_DEFAULT);
+
+    final public static int H5P_LINK_ACCESS = javaToC(JH5P_LINK_ACCESS);
+
+    final public static int H5P_LINK_ACCESS_DEFAULT = javaToC(JH5P_LINK_ACCESS_DEFAULT);
+
+    final public static int H5R_BADTYPE = javaToC(JH5R_BADTYPE);
+
+    final public static int H5R_DATASET_REGION = javaToC(JH5R_DATASET_REGION);
+
+    final public static int H5R_MAXTYPE = javaToC(JH5R_MAXTYPE);
+
+    final public static int H5R_OBJ_REF_BUF_SIZE = javaToC(JH5R_OBJ_REF_BUF_SIZE);
+
+    final public static int H5R_OBJECT = javaToC(JH5R_OBJECT);
+
+    final public static int H5S_ALL = javaToC(JH5S_ALL);
+
+    final public static int H5S_MAX_RANK = javaToC(JH5S_MAX_RANK);
+
+    final public static int H5S_NO_CLASS = javaToC(JH5S_NO_CLASS);
+
+    final public static int H5S_SCALAR = javaToC(JH5S_SCALAR);
+
+    final public static int H5S_SEL_ALL = javaToC(JH5S_SEL_ALL);
+
+    final public static int H5S_SEL_ERROR = javaToC(JH5S_SEL_ERROR);
+
+    final public static int H5S_SEL_HYPERSLABS = javaToC(JH5S_SEL_HYPERSLABS);
+
+    final public static int H5S_SEL_N = javaToC(JH5S_SEL_N);
+
+    final public static int H5S_SEL_NONE = javaToC(JH5S_SEL_NONE);
+
+    final public static int H5S_SEL_POINTS = javaToC(JH5S_SEL_POINTS);
+
+    final public static int H5S_SELECT_AND = javaToC(JH5S_SELECT_AND);
+
+    final public static int H5S_SELECT_APPEND = javaToC(JH5S_SELECT_APPEND);
+
+    final public static int H5S_SELECT_INVALID = javaToC(JH5S_SELECT_INVALID);
+
+    final public static int H5S_SELECT_NOOP = javaToC(JH5S_SELECT_NOOP);
+
+    final public static int H5S_SELECT_NOTA = javaToC(JH5S_SELECT_NOTA);
+
+    final public static int H5S_SELECT_NOTB = javaToC(JH5S_SELECT_NOTB);
+
+    final public static int H5S_SELECT_OR = javaToC(JH5S_SELECT_OR);
+
+    final public static int H5S_SELECT_PREPEND = javaToC(JH5S_SELECT_PREPEND);
+
+    final public static int H5S_SELECT_SET = javaToC(JH5S_SELECT_SET);
+
+    final public static int H5S_SELECT_XOR = javaToC(JH5S_SELECT_XOR);
+
+    final public static int H5S_SIMPLE = javaToC(JH5S_SIMPLE);
+
+    final public static int H5S_UNLIMITED = javaToC(JH5S_UNLIMITED);
+
+    final public static int H5T_ALPHA_B16 = javaToC(JH5T_ALPHA_B16);
+
+    final public static int H5T_ALPHA_B32 = javaToC(JH5T_ALPHA_B32);
+
+    final public static int H5T_ALPHA_B64 = javaToC(JH5T_ALPHA_B64);
+
+    final public static int H5T_ALPHA_B8 = javaToC(JH5T_ALPHA_B8);
+
+    final public static int H5T_ALPHA_F32 = javaToC(JH5T_ALPHA_F32);
+
+    final public static int H5T_ALPHA_F64 = javaToC(JH5T_ALPHA_F64);
+
+    final public static int H5T_ALPHA_I16 = javaToC(JH5T_ALPHA_I16);
+
+    final public static int H5T_ALPHA_I32 = javaToC(JH5T_ALPHA_I32);
+
+    final public static int H5T_ALPHA_I64 = javaToC(JH5T_ALPHA_I64);
+
+    final public static int H5T_ALPHA_I8 = javaToC(JH5T_ALPHA_I8);
+
+    final public static int H5T_ALPHA_U16 = javaToC(JH5T_ALPHA_U16);
+
+    final public static int H5T_ALPHA_U32 = javaToC(JH5T_ALPHA_U32);
+
+    final public static int H5T_ALPHA_U64 = javaToC(JH5T_ALPHA_U64);
+
+    final public static int H5T_ALPHA_U8 = javaToC(JH5T_ALPHA_U8);
+
+    final public static int H5T_ARRAY = javaToC(JH5T_ARRAY);
+
+    final public static int H5T_BITFIELD = javaToC(JH5T_BITFIELD);
+
+    final public static int H5T_BKG_NO = javaToC(JH5T_BKG_NO);
+
+    final public static int H5T_BKG_YES = javaToC(JH5T_BKG_YES);
+
+    final public static int H5T_C_S1 = javaToC(JH5T_C_S1);
+
+    final public static int H5T_COMPOUND = javaToC(JH5T_COMPOUND);
+
+    final public static int H5T_CONV_CONV = javaToC(JH5T_CONV_CONV);
+
+    final public static int H5T_CONV_FREE = javaToC(JH5T_CONV_FREE);
+
+    final public static int H5T_CONV_INIT = javaToC(JH5T_CONV_INIT);
+
+    final public static int H5T_CSET_ASCII = javaToC(JH5T_CSET_ASCII);
+    
+    final public static int H5T_CSET_UTF8 = javaToC(JH5T_CSET_UTF8);
+
+    final public static int H5T_CSET_ERROR = javaToC(JH5T_CSET_ERROR);
+
+    final public static int H5T_CSET_RESERVED_10 = javaToC(JH5T_CSET_RESERVED_10);
+
+    final public static int H5T_CSET_RESERVED_11 = javaToC(JH5T_CSET_RESERVED_11);
+
+    final public static int H5T_CSET_RESERVED_12 = javaToC(JH5T_CSET_RESERVED_12);
+
+    final public static int H5T_CSET_RESERVED_13 = javaToC(JH5T_CSET_RESERVED_13);
+
+    final public static int H5T_CSET_RESERVED_14 = javaToC(JH5T_CSET_RESERVED_14);
+
+    final public static int H5T_CSET_RESERVED_15 = javaToC(JH5T_CSET_RESERVED_15);
+
+    final public static int H5T_CSET_RESERVED_2 = javaToC(JH5T_CSET_RESERVED_2);
+
+    final public static int H5T_CSET_RESERVED_3 = javaToC(JH5T_CSET_RESERVED_3);
+
+    final public static int H5T_CSET_RESERVED_4 = javaToC(JH5T_CSET_RESERVED_4);
+
+    final public static int H5T_CSET_RESERVED_5 = javaToC(JH5T_CSET_RESERVED_5);
+
+    final public static int H5T_CSET_RESERVED_6 = javaToC(JH5T_CSET_RESERVED_6);
+
+    final public static int H5T_CSET_RESERVED_7 = javaToC(JH5T_CSET_RESERVED_7);
+
+    final public static int H5T_CSET_RESERVED_8 = javaToC(JH5T_CSET_RESERVED_8);
+
+    final public static int H5T_CSET_RESERVED_9 = javaToC(JH5T_CSET_RESERVED_9);
+
+    final public static int H5T_DIR_ASCEND = javaToC(JH5T_DIR_ASCEND);
+
+    final public static int H5T_DIR_DEFAULT = javaToC(JH5T_DIR_DEFAULT);
+
+    final public static int H5T_DIR_DESCEND = javaToC(JH5T_DIR_DESCEND);
+
+    final public static int H5T_ENUM = javaToC(JH5T_ENUM);
+
+    final public static int H5T_FLOAT = javaToC(JH5T_FLOAT);
+
+    final public static int H5T_FORTRAN_S1 = javaToC(JH5T_FORTRAN_S1);
+
+    final public static int H5T_IEEE_F32BE = javaToC(JH5T_IEEE_F32BE);
+
+    final public static int H5T_IEEE_F32LE = javaToC(JH5T_IEEE_F32LE);
+
+    final public static int H5T_IEEE_F64BE = javaToC(JH5T_IEEE_F64BE);
+
+    final public static int H5T_IEEE_F64LE = javaToC(JH5T_IEEE_F64LE);
+
+    final public static int H5T_INTEGER = javaToC(JH5T_INTEGER);
+
+    final public static int H5T_INTEL_B16 = javaToC(JH5T_INTEL_B16);
+
+    final public static int H5T_INTEL_B32 = javaToC(JH5T_INTEL_B32);
+
+    final public static int H5T_INTEL_B64 = javaToC(JH5T_INTEL_B64);
+
+    final public static int H5T_INTEL_B8 = javaToC(JH5T_INTEL_B8);
+
+    final public static int H5T_INTEL_F32 = javaToC(JH5T_INTEL_F32);
+
+    final public static int H5T_INTEL_F64 = javaToC(JH5T_INTEL_F64);
+
+    final public static int H5T_INTEL_I16 = javaToC(JH5T_INTEL_I16);
+
+    final public static int H5T_INTEL_I32 = javaToC(JH5T_INTEL_I32);
+
+    final public static int H5T_INTEL_I64 = javaToC(JH5T_INTEL_I64);
+
+    final public static int H5T_INTEL_I8 = javaToC(JH5T_INTEL_I8);
+
+    final public static int H5T_INTEL_U16 = javaToC(JH5T_INTEL_U16);
+
+    final public static int H5T_INTEL_U32 = javaToC(JH5T_INTEL_U32);
+
+    final public static int H5T_INTEL_U64 = javaToC(JH5T_INTEL_U64);
+
+    final public static int H5T_INTEL_U8 = javaToC(JH5T_INTEL_U8);
+
+    final public static int H5T_MIPS_B16 = javaToC(JH5T_MIPS_B16);
+
+    final public static int H5T_MIPS_B32 = javaToC(JH5T_MIPS_B32);
+
+    final public static int H5T_MIPS_B64 = javaToC(JH5T_MIPS_B64);
+
+    final public static int H5T_MIPS_B8 = javaToC(JH5T_MIPS_B8);
+
+    final public static int H5T_MIPS_F32 = javaToC(JH5T_MIPS_F32);
+
+    final public static int H5T_MIPS_F64 = javaToC(JH5T_MIPS_F64);
+
+    final public static int H5T_MIPS_I16 = javaToC(JH5T_MIPS_I16);
+
+    final public static int H5T_MIPS_I32 = javaToC(JH5T_MIPS_I32);
+
+    final public static int H5T_MIPS_I64 = javaToC(JH5T_MIPS_I64);
+
+    final public static int H5T_MIPS_I8 = javaToC(JH5T_MIPS_I8);
+
+    final public static int H5T_MIPS_U16 = javaToC(JH5T_MIPS_U16);
+
+    final public static int H5T_MIPS_U32 = javaToC(JH5T_MIPS_U32);
+
+    final public static int H5T_MIPS_U64 = javaToC(JH5T_MIPS_U64);
+
+    final public static int H5T_MIPS_U8 = javaToC(JH5T_MIPS_U8);
+
+    final public static int H5T_NATIVE_B16 = javaToC(JH5T_NATIVE_B16);
+
+    final public static int H5T_NATIVE_B32 = javaToC(JH5T_NATIVE_B32);
+
+    final public static int H5T_NATIVE_B64 = javaToC(JH5T_NATIVE_B64);
+
+    final public static int H5T_NATIVE_B8 = javaToC(JH5T_NATIVE_B8);
+
+    final public static int H5T_NATIVE_CHAR = javaToC(JH5T_NATIVE_CHAR);
+
+    final public static int H5T_NATIVE_DOUBLE = javaToC(JH5T_NATIVE_DOUBLE);
+
+    final public static int H5T_NATIVE_FLOAT = javaToC(JH5T_NATIVE_FLOAT);
+
+    final public static int H5T_NATIVE_HADDR = javaToC(JH5T_NATIVE_HADDR);
+
+    final public static int H5T_NATIVE_HBOOL = javaToC(JH5T_NATIVE_HBOOL);
+
+    final public static int H5T_NATIVE_HERR = javaToC(JH5T_NATIVE_HERR);
+
+    final public static int H5T_NATIVE_HSIZE = javaToC(JH5T_NATIVE_HSIZE);
+
+    final public static int H5T_NATIVE_HSSIZE = javaToC(JH5T_NATIVE_HSSIZE);
+
+    final public static int H5T_NATIVE_INT = javaToC(JH5T_NATIVE_INT);
+
+    final public static int H5T_NATIVE_INT_FAST16 = javaToC(JH5T_NATIVE_INT_FAST16);
+
+    final public static int H5T_NATIVE_INT_FAST32 = javaToC(JH5T_NATIVE_INT_FAST32);
+
+    final public static int H5T_NATIVE_INT_FAST64 = javaToC(JH5T_NATIVE_INT_FAST64);
+
+    final public static int H5T_NATIVE_INT_FAST8 = javaToC(JH5T_NATIVE_INT_FAST8);
+
+    final public static int H5T_NATIVE_INT_LEAST16 = javaToC(JH5T_NATIVE_INT_LEAST16);
+
+    final public static int H5T_NATIVE_INT_LEAST32 = javaToC(JH5T_NATIVE_INT_LEAST32);
+
+    final public static int H5T_NATIVE_INT_LEAST64 = javaToC(JH5T_NATIVE_INT_LEAST64);
+
+    final public static int H5T_NATIVE_INT_LEAST8 = javaToC(JH5T_NATIVE_INT_LEAST8);
+
+    final public static int H5T_NATIVE_INT16 = javaToC(JH5T_NATIVE_INT16);
+
+    final public static int H5T_NATIVE_INT32 = javaToC(JH5T_NATIVE_INT32);
+
+    final public static int H5T_NATIVE_INT64 = javaToC(JH5T_NATIVE_INT64);
+
+    final public static int H5T_NATIVE_INT8 = javaToC(JH5T_NATIVE_INT8);
+
+    final public static int H5T_NATIVE_LDOUBLE = javaToC(JH5T_NATIVE_LDOUBLE);
+
+    final public static int H5T_NATIVE_LLONG = javaToC(JH5T_NATIVE_LLONG);
+
+    final public static int H5T_NATIVE_LONG = javaToC(JH5T_NATIVE_LONG);
+
+    final public static int H5T_NATIVE_OPAQUE = javaToC(JH5T_NATIVE_OPAQUE);
+
+    final public static int H5T_NATIVE_SCHAR = javaToC(JH5T_NATIVE_SCHAR);
+
+    final public static int H5T_NATIVE_SHORT = javaToC(JH5T_NATIVE_SHORT);
+
+    final public static int H5T_NATIVE_UCHAR = javaToC(JH5T_NATIVE_UCHAR);
+
+    final public static int H5T_NATIVE_UINT = javaToC(JH5T_NATIVE_UINT);
+
+    final public static int H5T_NATIVE_UINT_FAST16 = javaToC(JH5T_NATIVE_UINT_FAST16);
+
+    final public static int H5T_NATIVE_UINT_FAST32 = javaToC(JH5T_NATIVE_UINT_FAST32);
+
+    final public static int H5T_NATIVE_UINT_FAST64 = javaToC(JH5T_NATIVE_UINT_FAST64);
+
+    final public static int H5T_NATIVE_UINT_FAST8 = javaToC(JH5T_NATIVE_UINT_FAST8);
+
+    final public static int H5T_NATIVE_UINT_LEAST16 = javaToC(JH5T_NATIVE_UINT_LEAST16);
+
+    final public static int H5T_NATIVE_UINT_LEAST32 = javaToC(JH5T_NATIVE_UINT_LEAST32);
+
+    final public static int H5T_NATIVE_UINT_LEAST64 = javaToC(JH5T_NATIVE_UINT_LEAST64);
+
+    final public static int H5T_NATIVE_UINT_LEAST8 = javaToC(JH5T_NATIVE_UINT_LEAST8);
+
+    final public static int H5T_NATIVE_UINT16 = javaToC(JH5T_NATIVE_UINT16);
+
+    final public static int H5T_NATIVE_UINT32 = javaToC(JH5T_NATIVE_UINT32);
+
+    final public static int H5T_NATIVE_UINT64 = javaToC(JH5T_NATIVE_UINT64);
+
+    final public static int H5T_NATIVE_UINT8 = javaToC(JH5T_NATIVE_UINT8);
+
+    final public static int H5T_NATIVE_ULLONG = javaToC(JH5T_NATIVE_ULLONG);
+
+    final public static int H5T_NATIVE_ULONG = javaToC(JH5T_NATIVE_ULONG);
+
+    final public static int H5T_NATIVE_USHORT = javaToC(JH5T_NATIVE_USHORT);
+
+    final public static int H5T_NCLASSES = javaToC(JH5T_NCLASSES);
+
+    final public static int H5T_NO_CLASS = javaToC(JH5T_NO_CLASS);
+
+    final public static int H5T_NORM_ERROR = javaToC(JH5T_NORM_ERROR);
+
+    final public static int H5T_NORM_IMPLIED = javaToC(JH5T_NORM_IMPLIED);
+
+    final public static int H5T_NORM_MSBSET = javaToC(JH5T_NORM_MSBSET);
+
+    final public static int H5T_NORM_NONE = javaToC(JH5T_NORM_NONE);
+
+    final public static int H5T_NPAD = javaToC(JH5T_NPAD);
+
+    final public static int H5T_NSGN = javaToC(JH5T_NSGN);
+
+    final public static int H5T_OPAQUE = javaToC(JH5T_OPAQUE);
+
+    final public static int H5T_OPAQUE_TAG_MAX = javaToC(JH5T_OPAQUE_TAG_MAX); /* 1.6.5 */
+
+    final public static int H5T_ORDER_BE = javaToC(JH5T_ORDER_BE);
+
+    final public static int H5T_ORDER_ERROR = javaToC(JH5T_ORDER_ERROR);
+
+    final public static int H5T_ORDER_LE = javaToC(JH5T_ORDER_LE);
+
+    final public static int H5T_ORDER_NONE = javaToC(JH5T_ORDER_NONE);
+
+    final public static int H5T_ORDER_VAX = javaToC(JH5T_ORDER_VAX);
+
+    final public static int H5T_PAD_BACKGROUND = javaToC(JH5T_PAD_BACKGROUND);
+
+    final public static int H5T_PAD_ERROR = javaToC(JH5T_PAD_ERROR);
+
+    final public static int H5T_PAD_ONE = javaToC(JH5T_PAD_ONE);
+
+    final public static int H5T_PAD_ZERO = javaToC(JH5T_PAD_ZERO);
+
+    final public static int H5T_PERS_DONTCARE = javaToC(JH5T_PERS_DONTCARE);
+
+    final public static int H5T_PERS_HARD = javaToC(JH5T_PERS_HARD);
+
+    final public static int H5T_PERS_SOFT = javaToC(JH5T_PERS_SOFT);
+
+    final public static int H5T_REFERENCE = javaToC(JH5T_REFERENCE);
+
+    final public static int H5T_SGN_2 = javaToC(JH5T_SGN_2);
+
+    final public static int H5T_SGN_ERROR = javaToC(JH5T_SGN_ERROR);
+
+    final public static int H5T_SGN_NONE = javaToC(JH5T_SGN_NONE);
+
+    final public static int H5T_STD_B16BE = javaToC(JH5T_STD_B16BE);
+
+    final public static int H5T_STD_B16LE = javaToC(JH5T_STD_B16LE);
+
+    final public static int H5T_STD_B32BE = javaToC(JH5T_STD_B32BE);
+
+    final public static int H5T_STD_B32LE = javaToC(JH5T_STD_B32LE);
+
+    final public static int H5T_STD_B64BE = javaToC(JH5T_STD_B64BE);
+
+    final public static int H5T_STD_B64LE = javaToC(JH5T_STD_B64LE);
+
+    final public static int H5T_STD_B8BE = javaToC(JH5T_STD_B8BE);
+
+    final public static int H5T_STD_B8LE = javaToC(JH5T_STD_B8LE);
+
+    final public static int H5T_STD_I16BE = javaToC(JH5T_STD_I16BE);
+
+    final public static int H5T_STD_I16LE = javaToC(JH5T_STD_I16LE);
+
+    final public static int H5T_STD_I32BE = javaToC(JH5T_STD_I32BE);
+
+    final public static int H5T_STD_I32LE = javaToC(JH5T_STD_I32LE);
+
+    final public static int H5T_STD_I64BE = javaToC(JH5T_STD_I64BE);
+
+    final public static int H5T_STD_I64LE = javaToC(JH5T_STD_I64LE);
+
+    final public static int H5T_STD_I8BE = javaToC(JH5T_STD_I8BE);
+
+    final public static int H5T_STD_I8LE = javaToC(JH5T_STD_I8LE);
+
+    final public static int H5T_STD_REF_DSETREG = javaToC(JH5T_STD_REF_DSETREG);
+
+    final public static int H5T_STD_REF_OBJ = javaToC(JH5T_STD_REF_OBJ);
+
+    final public static int H5T_STD_U16BE = javaToC(JH5T_STD_U16BE);
+
+    final public static int H5T_STD_U16LE = javaToC(JH5T_STD_U16LE);
+
+    final public static int H5T_STD_U32BE = javaToC(JH5T_STD_U32BE);
+
+    final public static int H5T_STD_U32LE = javaToC(JH5T_STD_U32LE);
+
+    final public static int H5T_STD_U64BE = javaToC(JH5T_STD_U64BE);
+
+    final public static int H5T_STD_U64LE = javaToC(JH5T_STD_U64LE);
+
+    final public static int H5T_STD_U8BE = javaToC(JH5T_STD_U8BE);
+
+    final public static int H5T_STD_U8LE = javaToC(JH5T_STD_U8LE);
+
+    final public static int H5T_STR_ERROR = javaToC(JH5T_STR_ERROR);
+
+    final public static int H5T_STR_NULLPAD = javaToC(JH5T_STR_NULLPAD);
+
+    final public static int H5T_STR_NULLTERM = javaToC(JH5T_STR_NULLTERM);
+
+    final public static int H5T_STR_RESERVED_10 = javaToC(JH5T_STR_RESERVED_10);
+
+    final public static int H5T_STR_RESERVED_11 = javaToC(JH5T_STR_RESERVED_11);
+
+    final public static int H5T_STR_RESERVED_12 = javaToC(JH5T_STR_RESERVED_12);
+
+    final public static int H5T_STR_RESERVED_13 = javaToC(JH5T_STR_RESERVED_13);
+
+    final public static int H5T_STR_RESERVED_14 = javaToC(JH5T_STR_RESERVED_14);
+
+    final public static int H5T_STR_RESERVED_15 = javaToC(JH5T_STR_RESERVED_15);
+
+    final public static int H5T_STR_RESERVED_3 = javaToC(JH5T_STR_RESERVED_3);
+
+    final public static int H5T_STR_RESERVED_4 = javaToC(JH5T_STR_RESERVED_4);
+
+    final public static int H5T_STR_RESERVED_5 = javaToC(JH5T_STR_RESERVED_5);
+
+    final public static int H5T_STR_RESERVED_6 = javaToC(JH5T_STR_RESERVED_6);
+
+    final public static int H5T_STR_RESERVED_7 = javaToC(JH5T_STR_RESERVED_7);
+
+    final public static int H5T_STR_RESERVED_8 = javaToC(JH5T_STR_RESERVED_8);
+
+    final public static int H5T_STR_RESERVED_9 = javaToC(JH5T_STR_RESERVED_9);
+
+    final public static int H5T_STR_SPACEPAD = javaToC(JH5T_STR_SPACEPAD);
+
+    final public static int H5T_STRING = javaToC(JH5T_STRING);
+
+    final public static int H5T_TIME = javaToC(JH5T_TIME);
+
+    final public static int H5T_UNIX_D32BE = javaToC(JH5T_UNIX_D32BE);
+
+    final public static int H5T_UNIX_D32LE = javaToC(JH5T_UNIX_D32LE);
+
+    final public static int H5T_UNIX_D64BE = javaToC(JH5T_UNIX_D64BE);
+
+    final public static int H5T_UNIX_D64LE = javaToC(JH5T_UNIX_D64LE);
+
+    final public static int H5T_VARIABLE = javaToC(JH5T_VARIABLE); // Rosetta Biosoftware
+
+    final public static int H5T_VLEN = javaToC(JH5T_VLEN);
+
+    final public static int H5Z_CB_CONT = javaToC(JH5Z_CB_CONT);
+
+    final public static int H5Z_CB_ERROR = javaToC(JH5Z_CB_ERROR);
+
+    final public static int H5Z_CB_FAIL = javaToC(JH5Z_CB_FAIL);
+
+    final public static int H5Z_CB_NO = javaToC(JH5Z_CB_NO);
+
+    final public static int H5Z_DISABLE_EDC = javaToC(JH5Z_DISABLE_EDC);
+
+    final public static int H5Z_ENABLE_EDC = javaToC(JH5Z_ENABLE_EDC);
+
+    final public static int H5Z_ERROR_EDC = javaToC(JH5Z_ERROR_EDC);
+
+    final public static int H5Z_FILTER_DEFLATE = javaToC(JH5Z_FILTER_DEFLATE);
+
+    final public static int H5Z_FILTER_ERROR = javaToC(JH5Z_FILTER_ERROR);
+
+    final public static int H5Z_FILTER_FLETCHER32 = javaToC(JH5Z_FILTER_FLETCHER32);
+
+    final public static int H5Z_FILTER_MAX = javaToC(JH5Z_FILTER_MAX);
+
+    final public static int H5Z_FILTER_NONE = javaToC(JH5Z_FILTER_NONE);
+
+    final public static int H5Z_FILTER_RESERVED = javaToC(JH5Z_FILTER_RESERVED);
+
+    final public static int H5Z_FILTER_SHUFFLE = javaToC(JH5Z_FILTER_SHUFFLE);
+
+    final public static int H5Z_FILTER_SZIP = javaToC(JH5Z_FILTER_SZIP);
+
+    final public static int H5Z_FLAG_DEFMASK = javaToC(JH5Z_FLAG_DEFMASK);
+
+    final public static int H5Z_FLAG_INVMASK = javaToC(JH5Z_FLAG_INVMASK);
+
+    final public static int H5Z_FLAG_MANDATORY = javaToC(JH5Z_FLAG_MANDATORY);
+
+    final public static int H5Z_FLAG_OPTIONAL = javaToC(JH5Z_FLAG_OPTIONAL);
+
+    final public static int H5Z_FLAG_REVERSE = javaToC(JH5Z_FLAG_REVERSE);
+
+    final public static int H5Z_FLAG_SKIP_EDC = javaToC(JH5Z_FLAG_SKIP_EDC);
+
+    final public static int H5Z_MAX_NFILTERS = javaToC(JH5Z_MAX_NFILTERS);
+
+    final public static int H5Z_NO_EDC = javaToC(JH5Z_NO_EDC);
+
+    final public static int H5Z_SO_INT = javaToC(JH5Z_SO_INT);
+    
+    final public static int H5Z_SO_FLOAT_DSCALE = javaToC(JH5Z_SO_FLOAT_DSCALE);
+    
+    final public static int H5Z_SO_FLOAT_ESCALE = javaToC(JH5Z_SO_FLOAT_ESCALE);
+    
+    final public static int H5Z_FILTER_CONFIG_ENCODE_ENABLED =
+            javaToC(JH5Z_FILTER_CONFIG_ENCODE_ENABLED);
+
+    final public static int H5Z_FILTER_CONFIG_DECODE_ENABLED =
+            javaToC(JH5Z_FILTER_CONFIG_DECODE_ENABLED);
+
+    private static int javaToC(int javaConstant)
+    {
+        synchronized(ncsa.hdf.hdf5lib.H5.class)
+        {
+            return H5.J2C(javaConstant);
+        }
+    }
+}
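
A minimal usage sketch of the mapped constants above, assuming the enclosing
class is ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants and that
H5.H5Zget_filter_info() is available in the JHI5 wrapper (as in hdf-java);
the names below are illustrative:

    // The fields already hold the run-time C values resolved via H5.J2C(),
    // so callers can pass them directly to HDF5 entry points.
    int filter = HDF5Constants.H5Z_FILTER_DEFLATE;
    int config = H5.H5Zget_filter_info(filter);
    boolean canEncode = (config & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;
    boolean canDecode = (config & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) != 0;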
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDF5GroupInfo.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDF5GroupInfo.java
new file mode 100644
index 0000000..cd51de8
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDF5GroupInfo.java
@@ -0,0 +1,188 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+
+/**
+ * <p>
+ * This class is a container for the information reported about an HDF5 object by the H5Gget_obj_info() method.
+ * <p>
+ * The fileno and objno fields contain four values which uniquely identify an object among those HDF5 files which are
+ * open: if all four values are the same between two objects, then the two objects are the same (provided both files are
+ * still open). The nlink field is the number of hard links to the object or zero when information is being returned
+ * about a symbolic link (symbolic links do not have hard links but all other objects always have at least one). The
+ * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or H5G_LINK. The mtime field contains the
+ * modification time. If information is being returned about a symbolic link then linklen will be the length of the link
+ * value (the name of the pointed-to object with the null terminator); otherwise linklen will be zero. Other fields may
+ * be added to this structure in the future.
+ * <p>
+ * For details of the HDF5 libraries, see the HDF5 Documentation at: <a
+ * href="http://hdf.ncsa.uiuc.edu/HDF5/doc/">http://hdf.ncsa.uiuc.edu/HDF5/doc/</a>
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ */
+
+public class HDF5GroupInfo
+{
+    long[] fileno;
+
+    long[] objno;
+
+    int nlink;
+
+    int type;
+
+    long mtime;
+
+    int linklen;
+
+    public HDF5GroupInfo()
+    {
+        fileno = new long[2];
+        objno = new long[2];
+        nlink = -1;
+        type = -1;
+        mtime = 0;
+        linklen = 0;
+    }
+
+    /**
+     * Sets the HDF5 group information. Used by the JHI5.
+     * 
+     * @param fn File id number
+     * @param on Object id number
+     * @param nl Number of links
+     * @param t Type of the object
+     * @param mt Modification time
+     * @param len Length of link
+     */
+    public void setGroupInfo(final long[] fn, final long[] on, final int nl, final int t,
+            final long mt, final int len)
+    {
+        fileno = fn;
+        objno = on;
+        nlink = nl;
+        type = t;
+        mtime = mt;
+        linklen = len;
+    }
+
+    /** Resets all the group information to defaults. */
+    public void reset()
+    {
+        fileno[0] = 0;
+        fileno[1] = 0;
+        objno[0] = 0;
+        objno[1] = 0;
+        nlink = -1;
+        type = -1;
+        mtime = 0;
+        linklen = 0;
+    }
+
+    /* accessors */
+    public long[] getFileno()
+    {
+        return fileno;
+    }
+
+    public long[] getObjno()
+    {
+        return objno;
+    }
+
+    public int getType()
+    {
+        return type;
+    }
+
+    public int getNlink()
+    {
+        return nlink;
+    }
+
+    public long getMtime()
+    {
+        return mtime;
+    }
+
+    public int getLinklen()
+    {
+        return linklen;
+    }
+
+    /**
+     * The fileno and objno fields contain four values which uniquely identify an object among the open HDF5 files.
+     */
+    @Override
+    public boolean equals(final Object obj)
+    {
+        if ((obj instanceof HDF5GroupInfo) == false)
+        {
+            return false;
+        }
+
+        final HDF5GroupInfo target = (HDF5GroupInfo) obj;
+        if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1])
+                && (objno[0] == target.objno[0]) && (objno[1] == target.objno[1]))
+        {
+            return true;
+        } else
+        {
+            return false;
+        }
+    }
+
+    @Override
+    public int hashCode()
+    {
+        final HashCodeBuilder builder = new HashCodeBuilder();
+        builder.append(fileno);
+        builder.append(objno);
+        return builder.toHashCode();
+    }
+
+    /**
+     * Returns the object id.
+     */
+    public long getOID()
+    {
+        return objno[0];
+    }
+
+    /**
+     * Converts this object to a String representation.
+     * 
+     * @return a string representation of this object
+     */
+    @Override
+    public String toString()
+    {
+        String fileStr = "fileno=null";
+        String objStr = "objno=null";
+
+        if (fileno != null)
+        {
+            fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1];
+        }
+
+        if (objno != null)
+        {
+            objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1];
+        }
+
+        return getClass().getName() + "[" + fileStr + "," + objStr + ",type=" + type + ",nlink="
+                + nlink + ",mtime=" + mtime + ",linklen=" + linklen + "]";
+    }
+
+}
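
A short sketch of the identity semantics described in the class comment:
equals() and hashCode() consider only the four fileno/objno values, so two
instances with matching ids denote the same HDF5 object (all values below are
illustrative):

    HDF5GroupInfo a = new HDF5GroupInfo();
    HDF5GroupInfo b = new HDF5GroupInfo();
    // Hypothetical id values as the JHI5 would fill them in.
    a.setGroupInfo(new long[] { 1L, 2L }, new long[] { 3L, 4L }, 1, 0, 0L, 0);
    b.setGroupInfo(new long[] { 1L, 2L }, new long[] { 3L, 4L }, 5, 0, 0L, 0);
    // Same fileno and objno: same object, even though nlink differs.
    assert a.equals(b) && a.hashCode() == b.hashCode();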
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDFNativeData.java b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDFNativeData.java
new file mode 100644
index 0000000..3013ca2
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/HDFNativeData.java
@@ -0,0 +1,318 @@
+/****************************************************************************
+ * NCSA HDF                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+package ch.systemsx.cisd.hdf5.hdf5lib;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+
+/**
+ * This class provides a convenience interface to {@link NativeData}.
+ * <p>
+ * <b>This is an internal API that should not be expected to be stable between releases!</b>
+ */
+
+public class HDFNativeData
+{
+
+    static final int pointerSize;
+    
+    static
+    {
+        NativeData.ensureNativeLibIsLoaded();
+        pointerSize = H5.getPointerSize();
+    }
+    
+
+    /**
+     * Converts a <code>byte</code> value into a <code>byte[]</code>.
+     * 
+     * @param data The value to convert.
+     * @return The array containing the value.
+     */
+    public static byte[] byteToByte(byte data)
+    {
+        return new byte[]
+            { data };
+    }
+
+    /**
+     * Converts a <code>short</code> value into a <code>byte[]</code>.
+     * 
+     * @param data The value to convert.
+     * @return The array containing the value.
+     */
+    public static byte[] shortToByte(short data)
+    {
+        return NativeData.shortToByte(new short[] { data }, ByteOrder.NATIVE); 
+    }
+
+    /**
+     * Converts an <code>int</code> value into a <code>byte[]</code>.
+     * 
+     * @param data The value to convert.
+     * @return The array containing the value.
+     */
+    public static byte[] intToByte(int data)
+    {
+        return NativeData.intToByte(new int[] { data }, ByteOrder.NATIVE); 
+    }
+
+    /**
+     * Converts a <code>long</code> value into a <code>byte[]</code>.
+     * 
+     * @param data The value to convert.
+     * @return The array containing the value.
+     */
+    public static byte[] longToByte(long data)
+    {
+        return NativeData.longToByte(new long[] { data }, ByteOrder.NATIVE); 
+    }
+
+    /**
+     * Converts a <code>float</code> value into a <code>byte[]</code>.
+     * 
+     * @param data The value to convert.
+     * @return The array containing the value.
+     */
+    public static byte[] floatToByte(float data)
+    {
+        return NativeData.floatToByte(new float[] { data }, ByteOrder.NATIVE); 
+    }
+
+    /**
+     * Converts a <code>double</code> value into a <code>byte[]</code>.
+     * 
+     * @param data The value to convert.
+     * @return The array containing the value.
+     */
+    public static byte[] doubleToByte(double data)
+    {
+        return NativeData.doubleToByte(new double[] { data }, ByteOrder.NATIVE); 
+    }
+
+    /**
+     * Converts a range of a <code>byte[]</code> to a <code>short</code> value.
+     * 
+     * @param byteArr The value to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @return The <code>short</code> value.
+     */
+    public static short byteToShort(byte[] byteArr, int start)
+    {
+        return NativeData.byteToShort(byteArr, ByteOrder.NATIVE, start, 1)[0];
+    }
+
+    /**
+     * Converts a <code>byte[]</code> array into a <code>short[]</code> array.
+     * 
+     * @param byteArr The <code>byte[]</code> to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @param len The number of <code>short</code> values to convert.
+     * @return The <code>short[]</code> array.
+     */
+    public static short[] byteToShort(byte[] byteArr, int start, int len)
+    {
+        return NativeData.byteToShort(byteArr, ByteOrder.NATIVE, start, len);
+    }
+
+    /**
+     * Converts a range of a <code>byte[]</code> to an <code>int</code> value.
+     * 
+     * @param byteArr The value to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @return The <code>int</code> value.
+     */
+    public static int byteToInt(byte[] byteArr, int start)
+    {
+        return NativeData.byteToInt(byteArr, ByteOrder.NATIVE, start, 1)[0];
+    }
+
+    /**
+     * Converts a <code>byte[]</code> array into an <code>int[]</code> array.
+     * 
+     * @param byteArr The <code>byte[]</code> to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @param len The number of <code>int</code> values to convert.
+     * @return The <code>int[]</code> array.
+     */
+    public static int[] byteToInt(byte[] byteArr, int start, int len)
+    {
+        return NativeData.byteToInt(byteArr, ByteOrder.NATIVE, start, len);
+    }
+
+    /**
+     * Converts a range of a <code>byte[]</code> to a <code>long</code> value.
+     * 
+     * @param byteArr The value to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @return The <code>long</code> value.
+     */
+    public static long byteToLong(byte[] byteArr, int start)
+    {
+        return NativeData.byteToLong(byteArr, ByteOrder.NATIVE, start, 1)[0];
+    }
+
+    /**
+     * Converts a <code>byte[]</code> array into a <code>long[]</code> array.
+     * 
+     * @param byteArr The <code>byte[]</code> to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @param len The number of <code>long</code> values to convert.
+     * @return The <code>long[]</code> array.
+     */
+    public static long[] byteToLong(byte[] byteArr, int start, int len)
+    {
+        return NativeData.byteToLong(byteArr, ByteOrder.NATIVE, start, len);
+    }
+
+    /**
+     * Converts a range of a <code>byte[]</code> to a <code>float</code> value.
+     * 
+     * @param byteArr The value to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @return The <code>float</code> value.
+     */
+    public static float byteToFloat(byte[] byteArr, int start)
+    {
+        return NativeData.byteToFloat(byteArr, ByteOrder.NATIVE, start, 1)[0];
+    }
+
+    /**
+     * Converts a <code>byte[]</code> array into a <code>float[]</code> array.
+     * 
+     * @param byteArr The <code>byte[]</code> to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @param len The number of <code>float</code> values to convert.
+     * @return The <code>float[]</code> array.
+     */
+    public static float[] byteToFloat(byte[] byteArr, int start, int len)
+    {
+        return NativeData.byteToFloat(byteArr, ByteOrder.NATIVE, start, len);
+    }
+
+    /**
+     * Converts a range of a <code>byte[]</code> to a <code>double</code> value.
+     * 
+     * @param byteArr The value to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @return The <code>double</code> value.
+     */
+    public static double byteToDouble(byte[] byteArr, int start)
+    {
+        return NativeData.byteToDouble(byteArr, ByteOrder.NATIVE, start, 1)[0];
+    }
+
+    /**
+     * Converts a <code>byte[]</code> array into a <code>double[]</code> array.
+     * 
+     * @param byteArr The <code>byte[]</code> to convert.
+     * @param start The position in the <var>byteArr</var> to start the conversion.
+     * @param len The number of <code>double</code> values to convert.
+     * @return The <code>double[]</code> array.
+     */
+    public static double[] byteToDouble(byte[] byteArr, int start, int len)
+    {
+        return NativeData.byteToDouble(byteArr, ByteOrder.NATIVE, start, len);
+    }
+
+    /**
+     * Converts a <code>short[]</code> array to a <code>byte[]</code> array.
+     * 
+     *  @param data The <code>short[]</code> array to convert.
+     *  @return The <code>byte[]</code> array corresponding to the <code>short[]</code> array.
+     */
+    public static byte[] shortToByte(short[] data)
+    {
+        return NativeData.shortToByte(data, ByteOrder.NATIVE);
+    }
+
+    /**
+     * Converts an <code>int[]</code> array to a <code>byte[]</code> array.
+     * 
+     *  @param data The <code>int[]</code> array to convert.
+     *  @return The <code>byte[]</code> array corresponding to the <code>int[]</code> array.
+     */
+    public static byte[] intToByte(int[] data)
+    {
+        return NativeData.intToByte(data, ByteOrder.NATIVE);
+    }
+
+    /**
+     * Converts a <code>long[]</code> array to a <code>byte[]</code> array.
+     * 
+     *  @param data The <code>long[]</code> array to convert.
+     *  @return The <code>byte[]</code> array corresponding to the <code>long[]</code> array.
+     */
+    public static byte[] longToByte(long[] data)
+    {
+        return NativeData.longToByte(data, ByteOrder.NATIVE);
+    }
+
+    /**
+     * Converts a <code>float[]</code> array to a <code>byte[]</code> array.
+     * 
+     *  @param data The <code>float[]</code> array to convert.
+     *  @return The <code>byte[]</code> array corresponding to the <code>float[]</code> array.
+     */
+    public static byte[] floatToByte(float[] data)
+    {
+        return NativeData.floatToByte(data, ByteOrder.NATIVE);
+    }
+
+    /**
+     * Converts a <code>double[]</code> array to a <code>byte[]</code> array.
+     * 
+     *  @param data The <code>double[]</code> array to convert.
+     *  @return The <code>byte[]</code> array corresponding to the <code>double[]</code> array.
+     */
+    public static byte[] doubleToByte(double[] data)
+    {
+        return NativeData.doubleToByte(data, ByteOrder.NATIVE);
+    }
+
+    // String copying methods
+    
+    
+    /**
+     * Returns the size of a machine word on this platform.
+     */
+    public static int getMachineWordSize()
+    {
+        return pointerSize;
+    }
+    
+    /**
+     * Creates a C copy of str (using calloc) and puts a reference to it into buf at bufOfs.
+     */
+    public static int compoundCpyVLStr(String str, byte[] buf, int bufOfs)
+    {
+        return H5.compoundCpyVLStr(str, buf, bufOfs);
+    }
+    
+    /**
+     * Creates a Java String from a C char* pointer found in buf at bufOfs.
+     */
+    public static String createVLStrFromCompound(byte[] buf, int bufOfs)
+    {
+        return H5.createVLStrFromCompound(buf, bufOfs);
+    }
+    
+    /**
+     * Frees the variable-length strings in the compound buf, where each record has size recordSize and the
+     * variable-length members are found at the byte offsets given in vlIndices.
+     */
+    public static int freeCompoundVLStr(byte[] buf, int recordSize, int[] vlIndices)
+    {
+        return H5.freeCompoundVLStr(buf, recordSize, vlIndices);
+    }
+
+}
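
A minimal round-trip sketch using only the conversion helpers defined above
(values are illustrative):

    // Single value: int -> native-order byte[4] -> int.
    byte[] buf = HDFNativeData.intToByte(42);
    int value = HDFNativeData.byteToInt(buf, 0);
    assert value == 42;

    // Array variant: three doubles packed into one native-order buffer.
    byte[] dbuf = HDFNativeData.doubleToByte(new double[] { 1.0, 2.0, 3.0 });
    double[] doubles = HDFNativeData.byteToDouble(dbuf, 0, 3);
    assert doubles[1] == 2.0;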
diff --git a/source/java/ch/systemsx/cisd/hdf5/hdf5lib/package.html b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/package.html
new file mode 100644
index 0000000..77f9f7c
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/hdf5lib/package.html
@@ -0,0 +1,153 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!--NewPage-->
+<HTML>
+<HEAD>
+<!-- Generated by javadoc (build 1.6.0_26) on Sun Aug 28 16:02:20 CEST 2011 -->
+<TITLE>
+ch.systemsx.cisd.hdf5.hdf5lib
+</TITLE>
+
+<META NAME="date" CONTENT="2011-08-28">
+
+</HEAD>
+
+<BODY BGCOLOR="white">
+<h3>
+Introduction
+</h3>
+This package is the low-level Java interface for the CISD HDF5 library. It is derived from
+the low-level Java interface of the HDF Group's HDF-Java library.
+<p> 
+<i>It should be considered an internal interface and is likely to change in future versions.</i>
+ <p>
+ This code is called by Java programs to access the entry points of the HDF5 library. Each
+ routine wraps a single HDF5 entry point, generally with the arguments and return codes analogous
+ to the C interface.
+ <p>
+ For details of the HDF5 library, see the HDF5 Documentation at: <a
+ href="http://hdf.ncsa.uiuc.edu/HDF5/">http://hdf.ncsa.uiuc.edu/HDF5/</a>
+ <hr>
+ <p>
+ <b>Mapping of arguments for Java</b>
+ <p>
+ In general, arguments to the HDF Java API are straightforward translations from the 'C' API
+ described in the HDF Reference Manual.
+ <p>
+ <center>
+ <table border=2 cellpadding=2>
+ <caption><b>HDF-5 C types to Java types</b> </caption>
+ <tr>
+ <td><b>HDF-5</b></td>
+ <td><b>Java</b></td>
+ </tr>
+ <tr>
+ <td>H5T_NATIVE_INT</td>
+ <td>int, Integer</td>
+ </tr>
+ <tr>
+ <td>H5T_NATIVE_SHORT</td>
+ <td>short, Short</td>
+ </tr>
+ <tr>
+ <td>H5T_NATIVE_FLOAT</td>
+ <td>float, Float</td>
+ </tr>
+ <tr>
+ <td>H5T_NATIVE_DOUBLE</td>
+ <td>double, Double</td>
+ </tr>
+ <tr>
+ <td>H5T_NATIVE_CHAR</td>
+ <td>byte, Byte</td>
+ </tr>
+ <tr>
+ <td>H5T_C_S1</td>
+ <td>java.lang.String</td>
+ </tr>
+ <tr>
+ <td>void * <BR>
+ (i.e., pointer to `Any')</td>
+ <td>Special -- see HDFArray</td>
+ </tr>
+ </table>
+ </center>
+ <p>
+ <center> <b>General Rules for Passing Arguments and Results</b> </center>
+ <p>
+ In general, arguments passed <b>IN</b> to Java are the analogous basic types, as above. The
+ exception is for arrays, which are discussed below.
+ <p>
+ The <i>return value</i> of Java methods is also the analogous type, as above. A major exception
+ to that rule is that all HDF functions that return SUCCEED/FAIL are declared <i>boolean</i> in
+ the Java version, rather than <i>int</i> as in the C. Functions that return a value or else FAIL
+ are declared with a return type equivalent to that of the C function. However, in most cases the Java method will raise
+ an exception instead of returning an error code. See <a href="#ERRORS">Errors and Exceptions</a>
+ below.
+ <p>
+ Java does not support pass by reference of arguments, so arguments that are returned through
+ <b>OUT</b> parameters must be wrapped in an object or array. The Java API for HDF consistently
+ wraps arguments in arrays.
+ <p>
+ For instance, a function that returns two integers is declared:
+ <p>
+ 
+ <pre>
+       herr_t HDF5dummy( int *a1, int *a2)
+ </pre>
+ 
+ For the Java interface, this would be declared:
+ <p>
+ 
+ <pre>
+ public static native int HDF5dummy(int args[]);
+ </pre>
+ 
+ where <i>a1</i> is <i>args[0]</i> and <i>a2</i> is <i>args[1]</i>, and would be invoked:
+ <p>
+ 
+ <pre>
+ H5.HDF5dummy(args);
+ </pre>
+ <p>
+ All the routines where this convention is used will have specific documentation of the details,
+ given below.
+ <p>
+ <a NAME="CONSTANTS"> <b>HDF-5 Constants</b>
+ <p>
+ The HDF-5 API defines a set of constants and enumerated values. Most of these values are
+ available to Java programs via the class <code>HDF5Constants</code>. For example, the parameters 
+ for the H5Fopen() call include two numeric values, <code>HDF5Constants.H5F_ACC_RDWR</code> and 
+ <code>HDF5Constants.H5P_DEFAULT</code>. As would be expected, these numbers correspond to the 
+ C constants <code>H5F_ACC_RDWR</code> and <code>H5P_DEFAULT</code>.
+ <p>
+ The HDF-5 API defines a set of values that describe number types and sizes, such as
+ "H5T_NATIVE_INT" and "hsize_t". These values are determined at run time by the HDF-5 C library.
+ To support these parameters, the Java class <code>HDF5Constants</code> looks up the values when 
+ initialized. The values can be accessed as public variables of the Java class, such as:
+ 
+ <pre>
+ int data_type = HDF5Constants.JH5T_NATIVE_INT;
+ </pre>
+ 
+ The Java application uses both types of constants the same way; the only difference is that the
+ <code>HDF5Constants</code> values may differ between platforms.
+ <p>
+ <a NAME="ERRORS"> <b>Error handling and Exceptions</b>
+ <p>
+ The HDF5 error API (H5E) manages the behavior of the error stack in the HDF-5 library. This API
+ is omitted from the JHI5. Errors are converted into Java exceptions. This is totally different
+ from the C interface, but is very natural for Java programming.
+ <p>
+ The exceptions of the JHI5 are organized as sub-classes of the class <code>HDF5Exception</code>. 
+ There are two subclasses of <code>HDF5Exception</code>, <code>HDF5LibraryException</code>
+ and <code>HDF5JavaException</code>. The sub-classes of the former represent errors from the HDF-5 
+ C library, while sub-classes of the latter represent errors in the JHI5 wrapper and support code.
+ <p>
+ The super-class <code>HDF5LibraryException</code> implements the method
+ '<code>printStackTrace()</code>', which prints out the HDF-5 error stack, as described in the
+ HDF-5 C API <i><b>H5Eprint()</b>.</i> This may be used by Java exception handlers to print out
+ the HDF-5 error stack.
+ <hr>
+
+</BODY>
+</HTML>
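
To make the OUT-parameter and exception conventions described above concrete,
a caller of the (purely illustrative) HDF5dummy entry point would look like
this sketch:

    int[] args = new int[2];
    try
    {
        H5.HDF5dummy(args);   // hypothetical function from the text above
        int a1 = args[0];     // first OUT value
        int a2 = args[1];     // second OUT value
    } catch (HDF5LibraryException ex)
    {
        // Errors from the HDF5 C library arrive as exceptions rather than
        // return codes; printStackTrace() also prints the HDF5 error stack.
        ex.printStackTrace();
    }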
diff --git a/source/java/ch/systemsx/cisd/hdf5/io/HDF5DataSetRandomAccessFile.java b/source/java/ch/systemsx/cisd/hdf5/io/HDF5DataSetRandomAccessFile.java
new file mode 100644
index 0000000..27c361a
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/io/HDF5DataSetRandomAccessFile.java
@@ -0,0 +1,868 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.io;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.Flushable;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5FileNotFoundException;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.base.io.IRandomAccessFile;
+import ch.systemsx.cisd.hdf5.HDF5DataClass;
+import ch.systemsx.cisd.hdf5.HDF5DataSetInformation;
+import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+import ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures;
+import ch.systemsx.cisd.hdf5.HDF5OpaqueType;
+import ch.systemsx.cisd.hdf5.HDF5StorageLayout;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+
+/**
+ * A {@link IRandomAccessFile} backed by an HDF5 dataset. The HDF5 dataset needs to be a byte array
+ * (or opaque byte array) of rank 1.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5DataSetRandomAccessFile implements IRandomAccessFile, Flushable
+{
+    private final IHDF5Reader reader;
+
+    private final IHDF5Writer writerOrNull;
+
+    private final String dataSetPath;
+
+    private final HDF5DataSetInformation dataSetInfo;
+
+    private final HDF5OpaqueType opaqueTypeOrNull;
+
+    private final int blockSize;
+
+    private final boolean extendable;
+
+    private final boolean closeReaderOnCloseFile;
+
+    private long length;
+
+    private int realBlockSize;
+
+    private byte[] block;
+
+    private long blockOffset;
+
+    private int positionInBlock;
+
+    private boolean blockDirty;
+
+    private long blockOffsetMark = -1;
+
+    private int positionInBlockMark = -1;
+
+    private boolean extensionPending;
+
+    private ch.systemsx.cisd.base.convert.NativeData.ByteOrder byteOrder =
+            ch.systemsx.cisd.base.convert.NativeData.ByteOrder.BIG_ENDIAN;
+
+    /**
+     * Creates a new HDF5DataSetRandomAccessFile for the given hdf5File and dataSetPath.
+     */
+    HDF5DataSetRandomAccessFile(File hdf5File, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int size, String opaqueTagOrNull,
+            boolean readOnly)
+    {
+        this(createHDF5ReaderOrWriter(hdf5File, readOnly), dataSetPath, creationStorageFeature,
+                size, opaqueTagOrNull, true);
+    }
+
+    private static IHDF5Reader createHDF5ReaderOrWriter(File hdf5File, boolean readOnly)
+    {
+        try
+        {
+            if (readOnly)
+            {
+                return HDF5FactoryProvider.get().openForReading(hdf5File);
+            } else
+            {
+                return HDF5FactoryProvider.get().open(hdf5File);
+            }
+        } catch (HDF5FileNotFoundException ex)
+        {
+            throw new IOExceptionUnchecked(new FileNotFoundException(ex.getMessage()));
+        } catch (HDF5Exception ex)
+        {
+            throw new IOExceptionUnchecked(ex);
+        }
+    }
+
+    /**
+     * Creates a new HDF5DataSetRandomAccessFile for the given reader and dataSetPath.
+     * <p>
+     * If <code>reader instanceof IHDF5Writer</code>, the random access file will be in read-write
+     * mode, else it will be in readonly mode.
+     */
+    HDF5DataSetRandomAccessFile(IHDF5Reader reader, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int size, String opaqueTagOrNull,
+            boolean closeReaderOnCloseFile) throws IOExceptionUnchecked
+    {
+        this.closeReaderOnCloseFile = closeReaderOnCloseFile;
+        final boolean readOnly = (reader instanceof IHDF5Writer) == false;
+        try
+        {
+            if (readOnly)
+            {
+                this.reader = reader;
+                this.writerOrNull = null;
+            } else
+            {
+                this.writerOrNull = (IHDF5Writer) reader;
+                this.writerOrNull.file().addFlushable(this);
+                this.reader = writerOrNull;
+                if (writerOrNull.exists(dataSetPath) == false)
+                {
+                    long maxSize = requiresFixedMaxSize(creationStorageFeature) ? size : 0;
+                    if (opaqueTagOrNull == null)
+                    {
+                        writerOrNull.int8().createArray(dataSetPath, maxSize, size,
+                                HDF5IntStorageFeatures.createFromGeneric(creationStorageFeature));
+                    } else
+                    {
+                        writerOrNull.opaque().createArray(dataSetPath, opaqueTagOrNull, maxSize,
+                                size, creationStorageFeature);
+                    }
+                }
+            }
+        } catch (HDF5Exception ex)
+        {
+            throw new IOExceptionUnchecked(ex);
+        }
+        this.dataSetPath = dataSetPath;
+        this.dataSetInfo = reader.getDataSetInformation(dataSetPath);
+        if (readOnly == false
+                && dataSetInfo.getTypeInformation().getDataClass() == HDF5DataClass.OPAQUE)
+        {
+            this.opaqueTypeOrNull = reader.opaque().tryGetOpaqueType(dataSetPath);
+        } else
+        {
+            this.opaqueTypeOrNull = null;
+        }
+        if (dataSetInfo.getRank() != 1)
+        {
+            throw new IOExceptionUnchecked("Dataset has wrong rank (r=" + dataSetInfo.getRank()
+                    + ")");
+        }
+        if (dataSetInfo.getTypeInformation().getElementSize() != 1)
+        {
+            throw new IOExceptionUnchecked("Dataset has wrong element size (size="
+                    + dataSetInfo.getTypeInformation().getElementSize() + " bytes)");
+        }
+        this.length = dataSetInfo.getSize();
+
+        // Chunked data sets are read chunk by chunk, other layouts are read completely.
+        if (dataSetInfo.getStorageLayout() == HDF5StorageLayout.CHUNKED)
+        {
+            this.blockSize = dataSetInfo.tryGetChunkSizes()[0];
+        } else
+        {
+            // Limitation: we do not yet handle the case of contiguous data sets larger than 2GB
+            if ((int) length != length())
+            {
+                throw new IOExceptionUnchecked("Dataset is too large (size=" + length + " bytes)");
+
+            }
+            this.blockSize = (int) length;
+        }
+        this.extendable = (dataSetInfo.getStorageLayout() == HDF5StorageLayout.CHUNKED);
+        this.blockOffset = 0;
+        this.block = new byte[blockSize];
+        this.realBlockSize = -1;
+        this.positionInBlock = 0;
+    }
+
+    private static boolean requiresFixedMaxSize(HDF5GenericStorageFeatures features)
+    {
+        return features.tryGetProposedLayout() != null
+                && features.tryGetProposedLayout() != HDF5StorageLayout.CHUNKED;
+    }
+
+    private void ensureInitializedForWriting(int lenCurrentOp) throws IOExceptionUnchecked
+    {
+        if (realBlockSize < 0)
+        {
+            realBlockSize = blockSize;
+            long minLen = blockOffset + realBlockSize;
+            final long oldLength = length();
+            if (minLen > oldLength)
+            {
+                realBlockSize = Math.min(realBlockSize, lenCurrentOp);
+                minLen = blockOffset + realBlockSize;
+                if (minLen > oldLength)
+                {
+                    setLength(minLen);
+                }
+            }
+            if ((oldLength - blockSize) > 0)
+            {
+                try
+                {
+                    this.realBlockSize =
+                            reader.opaque().readArrayToBlockWithOffset(dataSetPath, block,
+                                    realBlockSize, blockOffset, 0);
+                } catch (HDF5Exception ex)
+                {
+                    throw new IOExceptionUnchecked(ex);
+                }
+            } else
+            {
+                Arrays.fill(block, (byte) 0);
+            }
+        }
+    }
+
+    private void ensureInitializedForReading() throws IOExceptionUnchecked
+    {
+        if (realBlockSize < 0)
+        {
+            try
+            {
+                this.realBlockSize =
+                        reader.opaque().readArrayToBlockWithOffset(dataSetPath, block, blockSize,
+                                blockOffset, 0);
+            } catch (HDF5Exception ex)
+            {
+                throw new IOExceptionUnchecked(ex);
+            }
+        }
+    }
+
+    private void readBlock(long newBlockOffset) throws IOExceptionUnchecked
+    {
+        if (newBlockOffset != blockOffset)
+        {
+            flush();
+            try
+            {
+                this.realBlockSize =
+                        reader.opaque().readArrayToBlockWithOffset(dataSetPath, block, blockSize,
+                                newBlockOffset, 0);
+            } catch (HDF5Exception ex)
+            {
+                throw new IOExceptionUnchecked(ex);
+            }
+            this.blockOffset = newBlockOffset;
+        }
+    }
+
+    private void readNextBlockResetPosition()
+    {
+        readBlock(blockOffset + realBlockSize);
+        this.positionInBlock = 0;
+    }
+
+    private boolean eof()
+    {
+        return (available() == 0);
+    }
+
+    private void checkEoFOnWrite()
+    {
+        if (extendable == false && eof())
+        {
+            throw new IOExceptionUnchecked(new EOFException("Dataset is EOF and not extendable."));
+        }
+    }
+
+    public File getHdf5File()
+    {
+        return reader.file().getFile();
+    }
+
+    public String getDataSetPath()
+    {
+        return dataSetPath;
+    }
+
+    /**
+     * Returns <code>true</code> if the HDF5 file has been opened in read-only mode.
+     */
+    public boolean isReadOnly()
+    {
+        return (writerOrNull == null);
+    }
+
+    private void extend(int numberOfBytesToExtend) throws IOExceptionUnchecked
+    {
+        final long len = length();
+        final long pos = getFilePointer();
+        final long newLen = pos + numberOfBytesToExtend;
+        if (newLen > len)
+        {
+            if (extendable == false)
+            {
+                throw new IOExceptionUnchecked("Unable to extend dataset from " + len + " to "
+                        + newLen + ": dataset is not extendable.");
+            }
+            setLength(pos + numberOfBytesToExtend);
+        }
+    }
+
+    private void checkWrite(int lenCurrentOp) throws IOExceptionUnchecked
+    {
+        ensureInitializedForWriting(lenCurrentOp);
+        checkWriteDoNotExtend();
+        if (extensionPending)
+        {
+            setLength(blockOffset + positionInBlock);
+        }
+    }
+
+    private void checkWriteDoNotExtend() throws IOExceptionUnchecked
+    {
+        if (isReadOnly())
+        {
+            throw new IOExceptionUnchecked("HDF5 dataset opened in read-only mode.");
+        }
+    }
+
+    @Override
+    public long getFilePointer() throws IOExceptionUnchecked
+    {
+        return blockOffset + positionInBlock;
+    }
+
+    @Override
+    public int read() throws IOExceptionUnchecked
+    {
+        ensureInitializedForReading();
+        if (positionInBlock == realBlockSize)
+        {
+            if (eof())
+            {
+                return -1;
+            }
+            readNextBlockResetPosition();
+            if (eof())
+            {
+                return -1;
+            }
+        }
+        return block[positionInBlock++] & 0xff;
+    }
+
+    @Override
+    public int read(byte[] b) throws IOExceptionUnchecked
+    {
+        return read(b, 0, b.length);
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOExceptionUnchecked
+    {
+        ensureInitializedForReading();
+        int realLen = getRealLen(len);
+        if (realLen == 0)
+        {
+            return -1;
+        }
+        int bytesLeft = realLen;
+        int currentOff = off;
+        while (bytesLeft > 0)
+        {
+            final int lenInBlock = Math.min(bytesLeft, bytesLeftInBlock());
+            System.arraycopy(block, positionInBlock, b, currentOff, lenInBlock);
+            positionInBlock += lenInBlock;
+            currentOff += lenInBlock;
+            bytesLeft -= lenInBlock;
+            if (bytesLeft > 0)
+            {
+                readNextBlockResetPosition();
+            }
+        }
+        return realLen;
+    }
+
+    private int bytesLeftInBlock()
+    {
+        return (realBlockSize - positionInBlock);
+    }
+
+    private int getRealLen(int len)
+    {
+        return Math.min(len, available());
+    }
+
+    private long getRealLen(long len)
+    {
+        return Math.min(len, available());
+    }
+
+    @Override
+    public long skip(long n) throws IOExceptionUnchecked
+    {
+        final long realN = getRealLen(n);
+        seek(getFilePointer() + realN);
+        return realN;
+    }
+
+    @Override
+    public int available()
+    {
+        return (int) Math.min(availableLong(), Integer.MAX_VALUE);
+    }
+
+    private long availableLong()
+    {
+        return length() - getFilePointer();
+    }
+
+    @Override
+    public void close() throws IOExceptionUnchecked
+    {
+        flush();
+        if (closeReaderOnCloseFile)
+        {
+            try
+            {
+                reader.close();
+            } catch (HDF5Exception ex)
+            {
+                throw new IOExceptionUnchecked(ex);
+            }
+        } else if (writerOrNull != null)
+        {
+            writerOrNull.file().removeFlushable(this);
+        }
+    }
+
+    @Override
+    public void mark(int readlimit)
+    {
+        this.blockOffsetMark = blockOffset;
+        this.positionInBlockMark = positionInBlock;
+    }
+
+    @Override
+    public void reset() throws IOExceptionUnchecked
+    {
+        if (blockOffsetMark < 0)
+        {
+            throw new IOExceptionUnchecked(new IOException("Stream not marked."));
+        }
+        readBlock(blockOffsetMark);
+        this.positionInBlock = positionInBlockMark;
+    }
+
+    @Override
+    public boolean markSupported()
+    {
+        return true;
+    }
+
+    @Override
+    public void flush() throws IOExceptionUnchecked
+    {
+        if (isReadOnly() == false && blockDirty)
+        {
+            try
+            {
+                if (opaqueTypeOrNull != null)
+                {
+                    writerOrNull.opaque().writeArrayBlockWithOffset(dataSetPath, opaqueTypeOrNull,
+                            block, realBlockSize, blockOffset);
+                } else
+                {
+                    writerOrNull.int8().writeArrayBlockWithOffset(dataSetPath, block,
+                            realBlockSize, blockOffset);
+                }
+            } catch (HDF5Exception ex)
+            {
+                throw new IOExceptionUnchecked(ex);
+            }
+            blockDirty = false;
+        }
+    }
+
+    @Override
+    public void synchronize() throws IOExceptionUnchecked
+    {
+        if (writerOrNull != null)
+        {
+            flush();
+            try
+            {
+                writerOrNull.file().flushSyncBlocking();
+            } catch (HDF5Exception ex)
+            {
+                throw new IOExceptionUnchecked(ex);
+            }
+        }
+    }
+
+    @Override
+    public ByteOrder getByteOrder()
+    {
+        return byteOrder == ch.systemsx.cisd.base.convert.NativeData.ByteOrder.BIG_ENDIAN ? ByteOrder.BIG_ENDIAN
+                : ByteOrder.LITTLE_ENDIAN;
+    }
+
+    @Override
+    public void setByteOrder(ByteOrder byteOrder)
+    {
+        if (byteOrder == ByteOrder.BIG_ENDIAN)
+        {
+            this.byteOrder = ch.systemsx.cisd.base.convert.NativeData.ByteOrder.BIG_ENDIAN;
+        } else
+        {
+            this.byteOrder = ch.systemsx.cisd.base.convert.NativeData.ByteOrder.LITTLE_ENDIAN;
+        }
+    }
+
+    @Override
+    public void seek(long pos) throws IOExceptionUnchecked
+    {
+        if (pos < 0)
+        {
+            throw new IOExceptionUnchecked("New position may not be negative.");
+        }
+        if (isReadOnly() && pos >= length())
+        {
+            throw new IOExceptionUnchecked(
+                    "In read-only mode, new position may not be larger than file size.");
+        }
+        final long newBlockOffset = (pos / blockSize) * blockSize;
+        this.positionInBlock = (int) (pos % blockSize);
+        if (newBlockOffset < length())
+        {
+            readBlock(newBlockOffset);
+        } else
+        {
+            this.blockOffset = newBlockOffset;
+            this.realBlockSize = positionInBlock + 1;
+        }
+        if (pos >= length())
+        {
+            this.extensionPending = true;
+        }
+    }
+
+    @Override
+    public long length() throws IOExceptionUnchecked
+    {
+        return length;
+    }
+
+    @Override
+    public void setLength(long newLength) throws IOExceptionUnchecked
+    {
+        checkWriteDoNotExtend();
+        if (extendable == false)
+        {
+            throw new IOExceptionUnchecked("setLength() called on non-extendable dataset.");
+        }
+        try
+        {
+            writerOrNull.object().setDataSetSize(dataSetPath, newLength);
+        } catch (HDF5Exception ex)
+        {
+            throw new IOExceptionUnchecked(ex);
+        }
+        length = newLength;
+    }
+
+    @Override
+    public void readFully(byte[] b) throws IOExceptionUnchecked
+    {
+        readFully(b, 0, b.length);
+    }
+
+    @Override
+    public void readFully(byte[] b, int off, int len) throws IOExceptionUnchecked
+    {
+        final int bytesRead = read(b, off, len);
+        if (bytesRead != len)
+        {
+            throw new IOExceptionUnchecked(new EOFException());
+        }
+    }
+
+    @Override
+    public int skipBytes(int n) throws IOExceptionUnchecked
+    {
+        return (int) skip(n);
+    }
+
+    @Override
+    public boolean readBoolean() throws IOExceptionUnchecked
+    {
+        return readUnsignedByte() != 0;
+    }
+
+    @Override
+    public byte readByte() throws IOExceptionUnchecked
+    {
+        return (byte) readUnsignedByte();
+    }
+
+    @Override
+    public int readUnsignedByte() throws IOExceptionUnchecked
+    {
+        final int b = read();
+        if (b < 0)
+        {
+            throw new IOExceptionUnchecked(new EOFException());
+        }
+        return b;
+    }
+
+    @Override
+    public short readShort() throws IOExceptionUnchecked
+    {
+        final byte[] byteArr = new byte[NativeData.SHORT_SIZE];
+        readFully(byteArr);
+        return NativeData.byteToShort(byteArr, byteOrder)[0];
+    }
+
+    @Override
+    public int readUnsignedShort() throws IOExceptionUnchecked
+    {
+        return readShort() & 0xffff;
+    }
+
+    @Override
+    public char readChar() throws IOExceptionUnchecked
+    {
+        final byte[] byteArr = new byte[NativeData.CHAR_SIZE];
+        readFully(byteArr);
+        return NativeData.byteToChar(byteArr, byteOrder)[0];
+    }
+
+    @Override
+    public int readInt() throws IOExceptionUnchecked
+    {
+        final byte[] byteArr = new byte[NativeData.INT_SIZE];
+        readFully(byteArr);
+        return NativeData.byteToInt(byteArr, byteOrder)[0];
+    }
+
+    @Override
+    public long readLong() throws IOExceptionUnchecked
+    {
+        final byte[] byteArr = new byte[NativeData.LONG_SIZE];
+        readFully(byteArr);
+        return NativeData.byteToLong(byteArr, byteOrder)[0];
+    }
+
+    @Override
+    public float readFloat() throws IOExceptionUnchecked
+    {
+        final byte[] byteArr = new byte[NativeData.FLOAT_SIZE];
+        readFully(byteArr);
+        return NativeData.byteToFloat(byteArr, byteOrder)[0];
+    }
+
+    @Override
+    public double readDouble() throws IOExceptionUnchecked
+    {
+        final byte[] byteArr = new byte[NativeData.DOUBLE_SIZE];
+        readFully(byteArr);
+        return NativeData.byteToDouble(byteArr, byteOrder)[0];
+    }
+
+    @Override
+    public String readLine() throws IOExceptionUnchecked
+    {
+        final StringBuilder builder = new StringBuilder();
+        int b;
+        boolean byteRead = false;
+        while ((b = read()) >= 0)
+        {
+            byteRead = true;
+            final char c = (char) b;
+            if (c == '\r')
+            {
+                continue;
+            }
+            if (c == '\n')
+            {
+                break;
+            }
+            builder.append(c);
+        }
+        if (byteRead == false)
+        {
+            return null;
+        } else
+        {
+            return builder.toString();
+        }
+    }
+
+    @Override
+    public String readUTF() throws IOExceptionUnchecked
+    {
+        try
+        {
+            final byte[] strBuf = new byte[readUnsignedShort()];
+            readFully(strBuf);
+            return new String(strBuf, "UTF-8");
+        } catch (UnsupportedEncodingException ex)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+        }
+    }
+
+    @Override
+    public void write(int b) throws IOExceptionUnchecked
+    {
+        checkWrite(1);
+        extend(1);
+        if (positionInBlock == realBlockSize)
+        {
+            checkEoFOnWrite();
+            readNextBlockResetPosition();
+            checkEoFOnWrite();
+        }
+        block[positionInBlock++] = (byte) b;
+        blockDirty = true;
+    }
+
+    @Override
+    public void write(byte[] b) throws IOExceptionUnchecked
+    {
+        write(b, 0, b.length);
+    }
+
+    @Override
+    public void write(byte[] b, int off, int len) throws IOExceptionUnchecked
+    {
+        checkWrite(len);
+        extend(len);
+        int bytesLeft = len;
+        int currentOff = off;
+        while (bytesLeft > 0)
+        {
+            final int lenInBlock = Math.min(bytesLeft, bytesLeftInBlock());
+            System.arraycopy(b, currentOff, block, positionInBlock, lenInBlock);
+            blockDirty = true;
+            positionInBlock += lenInBlock;
+            currentOff += lenInBlock;
+            bytesLeft -= lenInBlock;
+            if (bytesLeft > 0)
+            {
+                readNextBlockResetPosition();
+            }
+        }
+    }
+
+    @Override
+    public void writeBoolean(boolean v) throws IOExceptionUnchecked
+    {
+        write(v ? 1 : 0);
+    }
+
+    @Override
+    public void writeByte(int v) throws IOExceptionUnchecked
+    {
+        write(v);
+    }
+
+    @Override
+    public void writeShort(int v) throws IOExceptionUnchecked
+    {
+        write(NativeData.shortToByte(new short[]
+            { (short) v }, byteOrder));
+    }
+
+    @Override
+    public void writeChar(int v) throws IOExceptionUnchecked
+    {
+        write(NativeData.charToByte(new char[]
+            { (char) v }, byteOrder));
+    }
+
+    @Override
+    public void writeInt(int v) throws IOExceptionUnchecked
+    {
+        write(NativeData.intToByte(new int[]
+            { v }, byteOrder));
+    }
+
+    @Override
+    public void writeLong(long v) throws IOExceptionUnchecked
+    {
+        write(NativeData.longToByte(new long[]
+            { v }, byteOrder));
+    }
+
+    @Override
+    public void writeFloat(float v) throws IOExceptionUnchecked
+    {
+        write(NativeData.floatToByte(new float[]
+            { v }, byteOrder));
+    }
+
+    @Override
+    public void writeDouble(double v) throws IOExceptionUnchecked
+    {
+        write(NativeData.doubleToByte(new double[]
+            { v }, byteOrder));
+    }
+
+    @Override
+    public void writeBytes(String s) throws IOExceptionUnchecked
+    {
+        for (int i = 0; i < s.length(); i++)
+        {
+            write((byte) s.charAt(i));
+        }
+    }
+
+    @Override
+    public void writeChars(String s) throws IOExceptionUnchecked
+    {
+        for (int i = 0; i < s.length(); i++)
+        {
+            final char v = s.charAt(i);
+            write((byte) ((v >>> 8) & 0xFF));
+            write((byte) ((v >>> 0) & 0xFF));
+        }
+    }
+
+    @Override
+    public void writeUTF(String str) throws IOExceptionUnchecked
+    {
+        try
+        {
+            final byte[] strBuf = str.getBytes("UTF-8");
+            writeShort(strBuf.length);
+            write(strBuf);
+        } catch (UnsupportedEncodingException ex)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+        }
+    }
+
+}
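
A brief usage sketch of the class above, going through the factory methods of
HDF5IOAdapterFactory (shown next); the file name and dataset path are
illustrative:

    // Write a few primitives into a dataset-backed "file", then read them back.
    HDF5DataSetRandomAccessFile raf = HDF5IOAdapterFactory
            .asRandomAccessFileReadWrite(new File("container.h5"), "/virtual/file");
    raf.writeInt(4711);
    raf.writeDouble(3.14);
    raf.seek(0);
    int i = raf.readInt();        // 4711
    double d = raf.readDouble();  // 3.14
    raf.close();                  // flushes the dirty block and closes the writer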
diff --git a/source/java/ch/systemsx/cisd/hdf5/io/HDF5IOAdapterFactory.java b/source/java/ch/systemsx/cisd/hdf5/io/HDF5IOAdapterFactory.java
new file mode 100644
index 0000000..bf006ac
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/io/HDF5IOAdapterFactory.java
@@ -0,0 +1,373 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.io;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import ch.systemsx.cisd.base.io.AdapterIInputStreamToInputStream;
+import ch.systemsx.cisd.base.io.AdapterIOutputStreamToOutputStream;
+import ch.systemsx.cisd.base.io.IInputStream;
+import ch.systemsx.cisd.base.io.IOutputStream;
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+
+/**
+ * A factory of I/O adapters for HDF5 data sets.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5IOAdapterFactory
+{
+
+    private static final String OPAQUE_TAG_FILE = "FILE";
+
+    private final static int BUFFER_SIZE = 1024 * 1024;
+
+    //
+    // File methods
+    //
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link IOutputStream}.
+     * <p>
+     * If the dataset does not yet exist, it will create a chunked opaque dataset with a chunk size
+     * of 1MB and an opaque tag <code>FILE</code>.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link IOutputStream}.
+     */
+    public static IOutputStream asIOutputStream(File hdf5File, String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(hdf5File, dataSetPath,
+                HDF5GenericStorageFeatures.GENERIC_CHUNKED, BUFFER_SIZE, OPAQUE_TAG_FILE, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link IOutputStream}.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @param creationStorageFeature If the dataset does not yet exist, use this value as the
+     *            storage features when creating it.
+     * @param chunkSize If the dataset does not yet exist, use this value as the chunk size.
+     * @param opaqueTagOrNull If the dataset does not yet exist and this value is not
+     *            <code>null</code>, then an opaque dataset will be created and this value will be
+     *            used as the opaque tag.
+     * @return The {@link IOutputStream}.
+     */
+    public static IOutputStream asIOutputStream(File hdf5File, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int chunkSize, String opaqueTagOrNull)
+    {
+        return new HDF5DataSetRandomAccessFile(hdf5File, dataSetPath, creationStorageFeature,
+                chunkSize, opaqueTagOrNull, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link HDF5DataSetRandomAccessFile} in
+     * read/write mode.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link HDF5DataSetRandomAccessFile}.
+     */
+    public static HDF5DataSetRandomAccessFile asRandomAccessFileReadWrite(File hdf5File,
+            String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(hdf5File, dataSetPath,
+                HDF5GenericStorageFeatures.GENERIC_CHUNKED, BUFFER_SIZE, OPAQUE_TAG_FILE, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link OutputStream}.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @param creationStorageFeature If the dataset does not yet exist, use this value as the
+     *            storage features when creating it.
+     * @param chunkSize If the dataset does not yet exist, use this value as the chunk size.
+     * @param opaqueTagOrNull If the dataset does not yet exist and this value is not
+     *            <code>null</code>, then an opaque dataset will be created and this value will be
+     *            used as the opaque tag.
+     * @return The {@link OutputStream}.
+     */
+    public static OutputStream asOutputStream(File hdf5File, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int chunkSize, String opaqueTagOrNull)
+    {
+        return new AdapterIOutputStreamToOutputStream(asIOutputStream(hdf5File, dataSetPath,
+                creationStorageFeature, chunkSize, opaqueTagOrNull));
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link OutputStream}.
+     * <p>
+     * If the dataset does not yet exist, it will create a chunked opaque dataset with a chunk size
+     * of 1MB and an opaque tag <code>FILE</code>.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link OutputStream}.
+     */
+    public static OutputStream asOutputStream(File hdf5File, String dataSetPath)
+    {
+        return new AdapterIOutputStreamToOutputStream(asIOutputStream(hdf5File, dataSetPath,
+                HDF5GenericStorageFeatures.GENERIC_CHUNKED, BUFFER_SIZE, OPAQUE_TAG_FILE));
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link HDF5DataSetRandomAccessFile} in
+     * read/write mode.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @param creationStorageFeature If the dataset does not yet exist, use this value as the
+     *            storage features when creating it.
+     * @param chunkSize If the dataset does not yet exist, use this value as the chunk size.
+     * @param opaqueTagOrNull If the dataset does not yet exist and this value is not
+     *            <code>null</code>, then an opaque dataset will be created using this value will be
+     *            used as opaque tag.
+     * @return The {@link HDF5DataSetRandomAccessFile}.
+     */
+    public static HDF5DataSetRandomAccessFile asRandomAccessFile(File hdf5File, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int chunkSize, String opaqueTagOrNull)
+    {
+        return new HDF5DataSetRandomAccessFile(hdf5File, dataSetPath, creationStorageFeature,
+                chunkSize, opaqueTagOrNull, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link IInputStream}.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link IInputStream}.
+     */
+    public static IInputStream asIInputStream(File hdf5File, String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(hdf5File, dataSetPath, null, 0, null, true);
+    }
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link InputStream}.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link InputStream}.
+     */
+    public static InputStream asInputStream(File hdf5File, String dataSetPath)
+    {
+        return new AdapterIInputStreamToInputStream(asIInputStream(hdf5File, dataSetPath));
+    }
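+
+    // A minimal read-back sketch (hypothetical names; the dataset is expected
+    // to exist already):
+    //
+    //   InputStream in = asInputStream(new File("container.h5"), "/files/readme.txt");
+    //   try
+    //   {
+    //       int b;
+    //       while ((b = in.read()) >= 0)
+    //       {
+    //           System.out.write(b);
+    //       }
+    //   } finally
+    //   {
+    //       in.close();
+    //   }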
+
+    /**
+     * Creates an adapter of the <var>hdf5File</var> as an {@link HDF5DataSetRandomAccessFile} in
+     * read-only mode.
+     * 
+     * @param hdf5File The HDF5 file to create the adapter for.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link HDF5DataSetRandomAccessFile}.
+     */
+    public static HDF5DataSetRandomAccessFile asRandomAccessFileReadOnly(File hdf5File,
+            String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(hdf5File, dataSetPath, null, 0, null, true);
+    }
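+
+    // A minimal random-access sketch (hypothetical names), assuming the usual
+    // seek/read operations of a random access file:
+    //
+    //   HDF5DataSetRandomAccessFile raf =
+    //           asRandomAccessFileReadOnly(new File("container.h5"), "/files/data.bin");
+    //   raf.seek(1024);          // jump to byte offset 1024 within the dataset
+    //   int firstByte = raf.read();
+    //   raf.close();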
+
+    //
+    // Writer methods
+    //
+
+    /**
+     * Creates an adapter of the <var>writer</var> as an {@link IOutputStream}.
+     * <p>
+     * If the dataset does not yet exist, it will create a chunked opaque dataset with a chunk size
+     * of 1MB and an opaque tag <code>FILE</code>.
+     * <p>
+     * <b>Note that the returned object is buffered. Do not access <var>dataSetPath</var> directly
+     * through <var>writer</var> while this object is in use, or else the behavior is undefined!</b>
+     * 
+     * @param writer The HDF5 writer to create the adapter for. The writer will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link IOutputStream}.
+     */
+    public static IOutputStream asIOutputStream(IHDF5Writer writer, String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(writer, dataSetPath,
+                HDF5GenericStorageFeatures.GENERIC_CHUNKED, BUFFER_SIZE, OPAQUE_TAG_FILE, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>writer</var> as an {@link IOutputStream}.
+     * <p>
+     * <b>Note that the returned object is buffered. Do not access <var>dataSetPath</var> directly
+     * through <var>writer</var> while this object is in use, or else the behavior is undefined!</b>
+     * 
+     * @param writer The HDF5 writer to create the adapter for. The writer will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @param creationStorageFeature If the dataset does not yet exist, use this value as the
+     *            storage features when creating it.
+     * @param chunkSize If the dataset does not yet exist, use this value as the chunk size.
+     * @param opaqueTagOrNull If the dataset does not yet exist and this value is not
+     *            <code>null</code>, then an opaque dataset will be created and this value will be
+     *            used as the opaque tag.
+     * @return The {@link IOutputStream}.
+     */
+    public static IOutputStream asIOutputStream(IHDF5Writer writer, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int chunkSize, String opaqueTagOrNull)
+    {
+        return new HDF5DataSetRandomAccessFile(writer, dataSetPath, creationStorageFeature,
+                chunkSize, opaqueTagOrNull, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>writer</var> as an {@link OutputStream}.
+     * <p>
+     * If the dataset does not yet exist, it will create a chunked opaque dataset with a chunk size
+     * of 1MB and an opaque tag <code>FILE</code>.
+     * <p>
+     * <b>Note that the returned object is buffered. Do not access <var>dataSetPath</var> directly
+     * through <var>writer</var> while this object is in use, or else the behavior is undefined!</b>
+     * 
+     * @param writer The HDF5 writer to create the adapter for. The writer will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link OutputStream}.
+     */
+    public static OutputStream asOutputStream(IHDF5Writer writer, String dataSetPath)
+    {
+        return new AdapterIOutputStreamToOutputStream(asIOutputStream(writer, dataSetPath,
+                HDF5GenericStorageFeatures.GENERIC_CHUNKED, BUFFER_SIZE, OPAQUE_TAG_FILE));
+    }
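+
+    // A minimal sketch with an already open writer (hypothetical names). The
+    // stream has to be closed before dataSetPath is touched through the writer
+    // again:
+    //
+    //   OutputStream out = asOutputStream(writer, "/logs/run-1.txt");
+    //   out.write("started".getBytes());
+    //   out.close();             // flushes the buffer; the writer stays open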
+
+    /**
+     * Creates an adapter of the <var>writer</var> as an {@link OutputStream}.
+     * <p>
+     * <b>Note that the returned object is buffered. Do not access <var>dataSetPath</var> directly
+     * through <var>writer</var> while this object is in use, or else the behavior is undefined!</b>
+     * 
+     * @param writer The HDF5 writer to create the adapter for. The writer will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @param creationStorageFeature If the dataset does not yet exist, use this value as the
+     *            storage features when creating it.
+     * @param chunkSize If the dataset does not yet exist, use this value as the chunk size.
+     * @param opaqueTagOrNull If the dataset does not yet exist and this value is not
+     *            <code>null</code>, then an opaque dataset will be created and this value will be
+     *            used as the opaque tag.
+     * @return The {@link OutputStream}.
+     */
+    public static OutputStream asOutputStream(IHDF5Writer writer, String dataSetPath,
+            HDF5GenericStorageFeatures creationStorageFeature, int chunkSize, String opaqueTagOrNull)
+    {
+        return new AdapterIOutputStreamToOutputStream(asIOutputStream(writer, dataSetPath,
+                creationStorageFeature, chunkSize, opaqueTagOrNull));
+    }
+
+    /**
+     * Creates an adapter of the <var>writer</var> as an {@link HDF5DataSetRandomAccessFile}.
+     * <p>
+     * <b>Note that the returned object is buffered. Do not access <var>dataSetPath</var> directly
+     * through <var>writer</var> while this object is in use, or else the behavior is undefined!</b>
+     * 
+     * @param writer The HDF5 writer to create the adapter for. The writer will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @param creationStorageFeature If the dataset does not yet exist, use this value as the
+     *            storage features when creating it.
+     * @param chunkSize If the dataset does not yet exist, use this value as the chunk size.
+     * @param opaqueTagOrNull If the dataset does not yet exist and this value is not
+     *            <code>null</code>, then an opaque dataset will be created and this value will be
+     *            used as the opaque tag.
+     * @return The {@link HDF5DataSetRandomAccessFile}.
+     */
+    public static HDF5DataSetRandomAccessFile asRandomAccessFile(IHDF5Writer writer,
+            String dataSetPath, HDF5GenericStorageFeatures creationStorageFeature, int chunkSize,
+            String opaqueTagOrNull)
+    {
+        return new HDF5DataSetRandomAccessFile(writer, dataSetPath, creationStorageFeature,
+                chunkSize, opaqueTagOrNull, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>writer</var> as an {@link HDF5DataSetRandomAccessFile}.
+     * <p>
+     * <b>Note that the returned object is buffered. Do not access <var>dataSetPath</var> directly
+     * through <var>writer</var> while this object is in use, or else the behavior is undefined!</b>
+     * 
+     * @param writer The HDF5 writer to create the adapter for. The writer will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link HDF5DataSetRandomAccessFile}.
+     */
+    public static HDF5DataSetRandomAccessFile asRandomAccessFile(IHDF5Writer writer,
+            String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(writer, dataSetPath,
+                HDF5GenericStorageFeatures.GENERIC_CHUNKED, BUFFER_SIZE, OPAQUE_TAG_FILE, false);
+    }
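+
+    // A minimal sketch (hypothetical names), assuming the usual single-byte
+    // write operation of a random access file:
+    //
+    //   HDF5DataSetRandomAccessFile raf = asRandomAccessFile(writer, "/files/data.bin");
+    //   raf.seek(0);
+    //   raf.write(0xFF);         // overwrite the first byte in place
+    //   raf.close();             // the underlying writer stays open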
+
+    //
+    // Reader methods
+    //
+
+    /**
+     * Creates an adapter of the <var>reader</var> as an {@link IInputStream}.
+     * 
+     * @param reader The HDF5 reader to create the adapter for. The reader will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link IInputStream}.
+     */
+    public static IInputStream asIInputStream(IHDF5Reader reader, String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(reader, dataSetPath, null, 0, null, false);
+    }
+
+    /**
+     * Creates an adapter of the <var>reader</var> as an {@link InputStream}.
+     * 
+     * @param reader The HDF5 reader to create the adapter for. The reader will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link InputStream}.
+     */
+    public static InputStream asInputStream(IHDF5Reader reader, String dataSetPath)
+    {
+        return new AdapterIInputStreamToInputStream(asIInputStream(reader, dataSetPath));
+    }
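+
+    // A minimal sketch with an already open reader (hypothetical names):
+    //
+    //   InputStream in = asInputStream(reader, "/files/readme.txt");
+    //   byte[] buf = new byte[128];
+    //   int n = in.read(buf);    // reads up to 128 bytes from the dataset
+    //   in.close();              // the underlying reader stays open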
+
+    /**
+     * Creates an adapter of the <var>reader</var> as an {@link HDF5DataSetRandomAccessFile}.
+     * 
+     * @param reader The HDF5 reader to create the adapter for. The reader will <i>not</i> be closed
+     *            when the returned object is closed.
+     * @param dataSetPath The path of the HDF5 dataset in the HDF5 container to use as a file.
+     * @return The {@link HDF5DataSetRandomAccessFile}.
+     */
+    public static HDF5DataSetRandomAccessFile asRandomAccessFile(IHDF5Reader reader,
+            String dataSetPath)
+    {
+        return new HDF5DataSetRandomAccessFile(reader, dataSetPath, null, 0, null, false);
+    }
+
+}
diff --git a/source/java/ch/systemsx/cisd/hdf5/io/package.html b/source/java/ch/systemsx/cisd/hdf5/io/package.html
new file mode 100644
index 0000000..ce5a5d9
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/io/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+     "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+  <head>
+    <title>I/O Package</title>
+  </head>
+  <body>
+    <p>
+    This package contains classes for accessing an HDF5 file like an operating system file.
+    </p>
+  </body>
+</html> 
\ No newline at end of file
diff --git a/source/java/ch/systemsx/cisd/hdf5/package.html b/source/java/ch/systemsx/cisd/hdf5/package.html
new file mode 100644
index 0000000..b7228b9
--- /dev/null
+++ b/source/java/ch/systemsx/cisd/hdf5/package.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+     "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+  <head>
+    <title>Main Package</title>
+  </head>
+  <body>
+    <p>
+    Main package that contains the JHDF5 <a href="IHDF5Reader.html">reader</a> and <a href="IHDF5Writer.html">writer</a>. 
+    </p>
+  </body>
+</html> 
\ No newline at end of file
diff --git a/source/java/ncsa/hdf/hdf5lib/H5.java b/source/java/ncsa/hdf/hdf5lib/H5.java
new file mode 100644
index 0000000..509fdc2
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/H5.java
@@ -0,0 +1,9215 @@
+/****************************************************************************
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help@hdfgroup.org.           *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib;
+
+import java.nio.ByteBuffer;
+import java.util.Vector;
+
+import ncsa.hdf.hdf5lib.callbacks.H5D_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5D_iterate_t;
+import ncsa.hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5L_iterate_t;
+import ncsa.hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5O_iterate_t;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5AC_cache_config_t;
+import ncsa.hdf.hdf5lib.structs.H5A_info_t;
+import ncsa.hdf.hdf5lib.structs.H5G_info_t;
+import ncsa.hdf.hdf5lib.structs.H5L_info_t;
+import ncsa.hdf.hdf5lib.structs.H5O_info_t;
+
+
+/**
+ * This class is the Java interface for the HDF5 library.
+ * <p>
+ * This code is called by Java programs to access the entry points of the
+ * HDF5 library. Each routine wraps a single HDF5 entry point, generally
+ * with the arguments and return codes analogous to the C interface.
+ * <p>
+ * For details of the HDF5 library, see the HDF5 Documentation at: <a
+ * href="http://hdfgroup.org/HDF5/">http://hdfgroup.org/HDF5/</a>
+ * <hr>
+ * <p>
+ * <b>Mapping of arguments for Java</b>
+ * 
+ * <p>
+ * In general, arguments to the HDF Java API are straightforward translations
+ * from the 'C' API described in the HDF Reference Manual.
+ * <p>
+ * 
+ * <center>
+ * <table border=2 cellpadding=2>
+ * <caption><b>HDF-5 C types to Java types</b> </caption>
+ * <tr>
+ * <td><b>HDF-5</b></td>
+ * <td><b>Java</b></td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_INT</td>
+ * <td>int, Integer</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_SHORT</td>
+ * <td>short, Short</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_FLOAT</td>
+ * <td>float, Float</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_DOUBLE</td>
+ * <td>double, Double</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_CHAR</td>
+ * <td>byte, Byte</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_C_S1</td>
+ * <td>java.lang.String</td>
+ * </tr>
+ * <tr>
+ * <td>void * <BR>
+ * (i.e., pointer to `Any')</td>
+ * <td>Special -- see HDFArray</td>
+ * </tr>
+ * </table>
+ * </center>
+ * <p>
+ * <center> <b>General Rules for Passing Arguments and Results</b> </center>
+ * <p>
+ * In general, arguments passed <b>IN</b> to Java are the analogous basic types,
+ * as above. The exception is for arrays, which are discussed below.
+ * <p>
+ * The <i>return value</i> of Java methods is also the analogous type, as above.
+ * A major exception to that rule is that all HDF functions that return
+ * SUCCEED/FAIL are declared <i>boolean</i> in the Java version, rather than
+ * <i>int</i> as in the C. Functions that return a value or else FAIL are
+ * declared the equivalent to the C function. However, in most cases the Java
+ * method will raise an exception instead of returning an error code. See <a
+ * href="#ERRORS">Errors and Exceptions</a> below.
+ * <p>
+ * Java does not support pass by reference of arguments, so arguments that are
+ * returned through <b>OUT</b> parameters must be wrapped in an object or array.
+ * The Java API for HDF consistently wraps arguments in arrays.
+ * <p>
+ * For instance, a function that returns two integers is declared:
+ * <p>
+ * 
+ * <pre>
+ *       herr_t HDF5dummy(int *a1, int *a2)
+ * </pre>
+ * 
+ * For the Java interface, this would be declared:
+ * <p>
+ * 
+ * <pre>
+ * public synchronized static native int HDF5dummy(int args[]);
+ * </pre>
+ * 
+ * where <i>a1</i> is <i>args[0]</i> and <i>a2</i> is <i>args[1]</i>, and would
+ * be invoked:
+ * <p>
+ * 
+ * <pre>
+ * H5.HDF5dummy(args);
+ * </pre>
+ * 
+ * <p>
+ * All the routines where this convention is used will have specific
+ * documentation of the details, given below.
+ * <p>
+ * <a NAME="ARRAYS"> <b>Arrays</b> </a>
+ * <p>
+ * HDF5 needs to read and write multi-dimensional arrays of any number type (and
+ * records). The HDF5 API describes the layout of the source and destination,
+ * and the data for the array passed as a block of bytes, for instance,
+ * <p>
+ * 
+ * <pre>
+ *      herr_t H5Dread(int fid, int filetype, int memtype, int memspace,
+ *      void * data);
+ * </pre>
+ * 
+ * <p>
+ * where ``void *'' means that the data may be any valid numeric type, and is a
+ * contiguous block of bytes that is the data for a multi-dimensional array. The
+ * other parameters describe the dimensions, rank, and datatype of the array on
+ * disk (source) and in memory (destination).
+ * <p>
+ * For Java, this ``ANY'' is a problem, as the type of data must always be
+ * declared. Furthermore, multidimensional arrays are definitely <i>not</i>
+ * laid out contiguously in memory. It would be infeasible to declare a
+ * separate routine for every combination of number type and dimensionality. For
+ * that reason, the <a
+ * href="./ncsa.hdf.hdf5lib.HDFArray.html"><b>HDFArray</b></a> class is used to
+ * discover the type, shape, and size of the data array at run time, and to
+ * convert to and from a contiguous array of bytes in synchronized static native
+ * C order.
+ * <p>
+ * The upshot is that any Java array of numbers (either primitive or sub-classes
+ * of type <b>Number</b>) can be passed as an ``Object'', and the Java API will
+ * translate to and from the appropriate packed array of bytes needed by the C
+ * library. So the function above would be declared:
+ * <p>
+ * 
+ * <pre>
+ * public synchronized static native int H5Dread(int fid, int filetype,
+ *         int memtype, int memspace, Object data);
+ * </pre>
+ * 
+ * and the parameter <i>data</i> can be any multi-dimensional array of numbers,
+ * such as float[][], or int[][][], or Double[][].
+ * <p>
+ * <a NAME="CONSTANTS"> <b>HDF-5 Constants</b>
+ * <p>
+ * The HDF-5 API defines a set of constants and enumerated values. Most of these
+ * values are available to Java programs via the class <a
+ * href="./ncsa.hdf.hdf5lib.HDF5Constants.html"> <b>HDF5Constants</b></a>. For
+ * example, the parameters for the H5Fopen() call include two numeric values,
+ * <b><i>HDF5Constants.H5F_ACC_RDWR</i></b> and
+ * <b><i>HDF5Constants.H5P_DEFAULT</i></b>. As would be expected, these numbers
+ * correspond to the C constants <b><i>H5F_ACC_RDWR</i></b> and
+ * <b><i>H5P_DEFAULT</i></b>.
+ * <p>
+ * The HDF-5 API defines a set of values that describe number types and sizes,
+ * such as "H5T_NATIVE_INT" and "hsize_t". These values are determined at run
+ * time by the HDF-5 C library. To support these parameters, the Java class <a
+ * href="./ncsa.hdf.hdf5lib.HDF5CDataTypes.html"> <b>HDF5CDataTypes</b></a>
+ * looks up the values when it is initialized. The values can be accessed as public
+ * variables of the Java class, such as:
+ * 
+ * <pre>
+ * int data_type = HDF5CDataTypes.JH5T_NATIVE_INT;
+ * </pre>
+ * 
+ * The Java application uses both types of constants the same way; the only
+ * difference is that the <b><i>HDF5CDataTypes</i></b> may have different values
+ * on different platforms.
+ * <p>
+ * <a NAME="ERRORS"> <b>Error handling and Exceptions</b>
+ * <p>
+ * The HDF5 error API (H5E) manages the behavior of the error stack in the HDF-5
+ * library. This API is omitted from the JHI5. Errors are converted into Java
+ * exceptions. This is totally different from the C interface, but is very
+ * natural for Java programming.
+ * <p>
+ * The exceptions of the JHI5 are organized as sub-classes of the class <a
+ * href="./ncsa.hdf.hdf5lib.exceptions.HDF5Exception.html">
+ * <b>HDF5Exception</b></a>. There are two subclasses of <b>HDF5Exception</b>,
+ * <a href="./ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException.html">
+ * <b>HDF5LibraryException</b></a> and <a
+ * href="./ncsa.hdf.hdf5lib.exceptions.HDF5JavaException.html">
+ * <b>HDF5JavaException</b></a>. The sub-classes of the former represent errors
+ * from the HDF-5 C library, while sub-classes of the latter represent errors in
+ * the JHI5 wrapper and support code.
+ * <p>
+ * The super-class <b><i>HDF5LibraryException</i></b> implements the method
+ * '<b><i>printStackTrace()</i></b>', which prints out the HDF-5 error stack, as
+ * described in the HDF-5 C API <i><b>H5Eprint()</b>.</i> This may be used by
+ * Java exception handlers to print out the HDF-5 error stack.
+ * <hr>
+ * 
+ * @version HDF5 1.2 <BR>
+ *          <b>See also:</b> <a href="./ncsa.hdf.hdf5lib.HDFArray.html">
+ *          ncsa.hdf.hdf5lib.HDFArray</a><BR>
+ *          <a href="./ncsa.hdf.hdf5lib.HDF5Constants.html">
+ *          ncsa.hdf.hdf5lib.HDF5Constants</a><BR>
+ *          <a href="./ncsa.hdf.hdf5lib.HDF5CDataTypes.html">
+ *          ncsa.hdf.hdf5lib.HDF5CDataTypes</a><BR>
+ *          <a href="./ncsa.hdf.hdf5lib.HDF5Exception.html">
+ *          ncsa.hdf.hdf5lib.HDF5Exception</a><BR>
+ *          <a href="http://hdfgroup.org/HDF5/">
+ *          http://hdfgroup.org/HDF5/</a>
+ **/
+@SuppressWarnings("all")
+public class H5 implements java.io.Serializable {
+    /** Serialization version UID. */
+    private static final long serialVersionUID = 6129888282117053288L;
+
+    /**
+     * The version number of the HDF5 library: <br />
+     * LIB_VERSION[0]: The major version of the library.<br />
+     * LIB_VERSION[1]: The minor version of the library.<br />
+     * LIB_VERSION[2]: The release number of the library.<br />
+     * 
+     * Make sure to update the version numbers when a different library is used.
+     */
+    public final static int LIB_VERSION[] = { 1, 8, 13 };
+
+    public final static String H5PATH_PROPERTY_KEY = "ncsa.hdf.hdf5lib.H5.hdf5lib";
+
+    // add system property to load library by name from library path, via
+    // System.loadLibrary()
+    public final static String H5_LIBRARY_NAME_PROPERTY_KEY = "ncsa.hdf.hdf5lib.H5.loadLibraryName";
+    private static String s_libraryName;
+    private static boolean isLibraryLoaded = false;
+    
+    private final static boolean IS_CRITICAL_PINNING = true;
+    
+    private final static Vector<Integer> OPEN_IDS = new Vector<Integer>();
+
+    static {
+        loadH5Lib();
+    }
+
+    public static void loadH5Lib() {
+        ch.systemsx.cisd.hdf5.hdf5lib.H5General.ensureNativeLibIsLoaded();
+    }
+
+// ////////////////////////////////////////////////////////////
+//                                                           //
+// H5: General Library Functions                             //
+//                                                           //
+// ////////////////////////////////////////////////////////////
+
+/**
+ * Get number of open IDs.
+ */
+public final static int getOpenIDCount()
+{
+    return OPEN_IDS.size();
+}
+
+/**
+ * Get the open ID at the specified index.
+ * 
+ * @param index -- an index of the open ID.
+ * @return Returns the open ID at the specified index.
+ */
+public final static int getOpenID(int index)
+{
+    int id = -1;
+    if (index >= 0 && index < OPEN_IDS.size())
+        id = OPEN_IDS.elementAt(index);
+    
+    return id;
+}
+
+/**
+ * H5check_version verifies that the arguments match the version numbers
+ * compiled into the library.
+ * 
+ * @param majnum
+ *            The major version of the library.
+ * @param minnum
+ *            The minor version of the library.
+ * @param relnum
+ *            The release number of the library.
+ * @return a non-negative value if successful. Upon failure (when the
+ *         versions do not match), this function causes the application to
+ *         abort (i.e., crash).
+ * 
+ *         See C API function: herr_t H5check_version()
+ **/
+public synchronized static native int H5check_version(int majnum,
+        int minnum, int relnum);
+
+/**
+ * H5close flushes all data to disk, closes all file identifiers, and cleans
+ * up all memory used by the library.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5close() throws HDF5LibraryException;
+
+/**
+ * H5open initializes the library.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5open() throws HDF5LibraryException;
+
+/**
+ * H5dont_atexit indicates to the library that an atexit() cleanup routine
+ * should not be installed. In order to be effective, this routine must be
+ * called before any other HDF function calls, and must be called each time
+ * the library is loaded/linked into the application (the first time and
+ * after it's been unloaded).
+ * <P>
+ * This is called by the static initializer, so this should never need to be
+ * explicitly called by a Java program.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+private synchronized static native int H5dont_atexit()
+        throws HDF5LibraryException;
+
+/**
+ * Turns off error handling. By default, the C library prints the error stack
+ * of the HDF-5 C library on stdout. This behavior may be disabled by
+ * calling H5error_off().
+ */
+public synchronized static native int H5error_off();
+
+/**
+ * H5garbage_collect collects on all free-lists of all types.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5garbage_collect()
+        throws HDF5LibraryException;
+
+/**
+ * H5get_libversion retrieves the major, minor, and release numbers of the
+ * version of the HDF library which is linked to the application.
+ * 
+ * @param libversion
+ *            The version information of the HDF library.
+ * 
+ *            <pre>
+ *      libversion[0] = The major version of the library.
+ *      libversion[1] = The minor version of the library.
+ *      libversion[2] = The release number of the library.
+ * </pre>
+ * @return a non-negative value if successful, along with the version
+ *         information.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5get_libversion(int[] libversion)
+        throws HDF5LibraryException;
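+
+// A minimal call sketch: the version triple comes back through the array
+// argument, following the JHI5 convention of wrapping OUT parameters in arrays.
+//
+//   int[] libversion = new int[3];
+//   H5.H5get_libversion(libversion);
+//   // libversion now holds { major, minor, release }, e.g. { 1, 8, 13 }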
+
+public synchronized static native int H5set_free_list_limits(
+        int reg_global_lim, int reg_list_lim, int arr_global_lim,
+        int arr_list_lim, int blk_global_lim, int blk_list_lim)
+        throws HDF5LibraryException;
+
+/**
+ * H5export_dataset is a utility function to save data in a file.
+ * 
+ * @param file_export_name
+ *            The file name to export data into.
+ * @param file_name
+ *            The name of the HDF5 file containing the dataset.
+ * @param object_path
+ *            The full path of the dataset to be exported.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5export_dataset(String file_export_name, String file_name, String object_path, int binary_order) 
+        throws HDF5LibraryException;
+
+
+//////////////////////////////////////////////////////////////
+////
+//H5A: HDF5 1.8 Attribute Interface API Functions           //
+////
+//////////////////////////////////////////////////////////////
+
+/**
+ * H5Aclose terminates access to the attribute specified by its identifier,
+ * attr_id.
+ * 
+ * @param attr_id
+ *            IN: Attribute to release access to.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Aclose(int attr_id) throws HDF5LibraryException
+{
+    if (attr_id < 0)
+        return 0; // throw new HDF5LibraryException("Negative ID");
+    
+    OPEN_IDS.removeElement(attr_id);
+    return _H5Aclose(attr_id);
+}
+
+private synchronized static native int _H5Aclose(int attr_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Acopy copies the content of one attribute to another.
+ * 
+ * @param src_aid
+ *            the identifier of the source attribute
+ * @param dst_aid
+ *            the identifier of the destination attribute
+ */
+public synchronized static native int H5Acopy(int src_aid, int dst_aid)
+        throws HDF5LibraryException;
+
+/**
+* H5Acreate creates an attribute which is attached to the object specified
+* with loc_id.
+* 
+* @deprecated As of HDF5 1.8, replaced by {@link #H5Acreate(int, String, int, int, int, int)}
+* 
+* @param loc_id
+*            IN: Object (dataset, group, or named datatype) to be attached
+*            to.
+* @param name
+*            IN: Name of attribute to create.
+* @param type_id
+*            IN: Identifier of datatype for attribute.
+* @param space_id
+*            IN: Identifier of dataspace for attribute.
+* @param create_plist
+*            IN: Identifier of creation property list (currently not used).
+* 
+* @return an attribute identifier if successful
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+* @exception NullPointerException
+*                - name is null.
+**/
+@Deprecated
+public static int H5Acreate(int loc_id, String name, int type_id,
+        int space_id, int create_plist)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Acreate(loc_id, name, type_id, space_id, create_plist);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Acreate(int loc_id, String name,
+     int type_id, int space_id, int create_plist)
+     throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Acreate creates an attribute, attr_name, which is attached to the object specified by the identifier loc_id.  
+* 
+* @param loc_id            IN: Location or object identifier; may be dataset or group 
+* @param attr_name         IN: Attribute name 
+* @param type_id           IN: Attribute datatype identifier 
+* @param space_id          IN: Attribute dataspace identifier
+* @param acpl_id           IN: Attribute creation property list identifier 
+* @param aapl_id           IN: Attribute access property list identifier 
+* 
+* @return  An attribute identifier if successful; otherwise returns a negative value. 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - Name is null.
+**/
+public static int H5Acreate( int loc_id, String attr_name, int type_id, int space_id, int acpl_id, int aapl_id )
+throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Acreate2(loc_id, attr_name, type_id, space_id, acpl_id, aapl_id );
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
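+
+// A minimal creation sketch, assuming default property lists via
+// HDF5Constants.H5P_DEFAULT (locId, typeId and spaceId obtained elsewhere):
+//
+//   int attrId = H5.H5Acreate(locId, "units", typeId, spaceId,
+//           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+//   try { /* write the attribute */ } finally { H5.H5Aclose(attrId); }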
+
+/**
+* H5Acreate2 creates an attribute, attr_name, which is attached to the object
+* specified by the identifier loc_id.
+* 
+* @see #H5Acreate(int, String, int, int, int, int)
+**/
+private synchronized static native int _H5Acreate2( int loc_id, String attr_name, int type_id, int space_id, int acpl_id, int aapl_id ) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Acreate_by_name creates an attribute, attr_name, which is attached to the object specified by loc_id and obj_name.
+*
+* @param loc_id             IN: Location or object identifier; may be dataset or group
+* @param obj_name           IN: Name, relative to loc_id, of object that attribute is to be attached to
+* @param attr_name          IN: Attribute name 
+* @param type_id            IN: Attribute datatype identifier
+* @param space_id           IN: Attribute dataspace identifier 
+* @param acpl_id            IN: Attribute creation property list identifier (currently not used).
+* @param aapl_id            IN: Attribute access property list identifier (currently not used).
+* @param lapl_id            IN: Link access property list 
+*
+* @return  An attribute identifier if successful; otherwise returns a negative value.
+*
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - name is null.
+**/
+public static int H5Acreate_by_name(int loc_id, String obj_name, String attr_name, int type_id, int space_id, int acpl_id, int aapl_id, int lapl_id) 
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Acreate_by_name(loc_id, obj_name, attr_name, type_id, space_id, acpl_id, aapl_id, lapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Acreate_by_name(int loc_id, String obj_name, String attr_name, int type_id, int space_id, 
+        int acpl_id, int aapl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Adelete removes the attribute specified by its name, name, from a
+ * dataset, group, or named datatype.
+ * 
+ * @param loc_id
+ *            IN: Identifier of the dataset, group, or named datatype.
+ * @param name
+ *            IN: Name of the attribute to delete.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native int H5Adelete(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Adelete_by_idx removes an attribute, specified by its location in an index, from an object.
+*
+*  @param loc_id             IN: Location or object identifier; may be dataset or group 
+*  @param obj_name           IN: Name of object, relative to location, from which attribute is to be removed 
+*  @param idx_type           IN: Type of index  
+*  @param order              IN: Order in which to iterate over index
+*  @param n                  IN: Offset within index  
+*  @param lapl_id            IN: Link access property list identifier 
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - obj_name is null.
+**/
+public synchronized static native void H5Adelete_by_idx(int loc_id, String obj_name, int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Adelete_by_name removes the attribute attr_name from an object specified by location and name, loc_id and obj_name, respectively. 
+*
+* @param loc_id             IN: Location or object identifier; may be dataset or group
+* @param obj_name           IN: Name of object, relative to location, from which attribute is to be removed
+* @param attr_name          IN: Name of attribute to delete
+* @param lapl_id            IN: Link access property list identifier.
+*
+* @return a non-negative value if successful; otherwise returns a negative value.
+*
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - name is null.
+**/
+public synchronized static native int H5Adelete_by_name(int loc_id, String obj_name, String attr_name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Aexists determines whether the attribute attr_name exists on the object specified by obj_id.
+*
+* @param obj_id               IN: Object identifier.
+* @param attr_name            IN: Name of the attribute.
+*
+* @return boolean true if an attribute with a given name exists.
+*
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - attr_name is null.
+**/
+public synchronized static native boolean H5Aexists(int obj_id, String attr_name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Aexists_by_name determines whether the attribute attr_name exists on an object. That object is specified by its location and name, 
+* loc_id and obj_name, respectively.
+*
+* @param loc_id             IN: Location of object to which attribute is attached.
+* @param obj_name           IN: Name, relative to loc_id, of object that attribute is attached to.
+* @param attr_name          IN: Name of attribute.
+* @param lapl_id            IN: Link access property list identifier.
+*
+* @return boolean true if an attribute with a given name exists, otherwise returns false.
+*
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - name is null.
+**/
+public synchronized static native boolean H5Aexists_by_name(int loc_id, String obj_name, String attr_name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Aget_info retrieves attribute information, by attribute identifier. 
+ * 
+ * @param attr_id            IN: Attribute identifier 
+ * 
+ * @return  A buffer(H5A_info_t) for Attribute information 
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native H5A_info_t H5Aget_info(int attr_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Aget_info_by_idx retrieves attribute information, by attribute index position.
+* 
+* @param loc_id         IN: Location of object to which attribute is attached 
+* @param obj_name       IN: Name of object to which attribute is attached, relative to location
+* @param idx_type       IN: Type of index 
+* @param order          IN: Index traversal order
+* @param n              IN: Attribute's position in index 
+* @param lapl_id        IN: Link access property list
+*  
+* @return  A buffer(H5A_info_t) for Attribute information 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - obj_name is null.
+**/
+public synchronized static native H5A_info_t H5Aget_info_by_idx(int loc_id, String obj_name, int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Aget_info_by_name retrieves attribute information, by attribute name.
+* 
+* @param loc_id         IN: Location of object to which attribute is attached 
+* @param obj_name       IN: Name of object to which attribute is attached, relative to location
+* @param attr_name      IN: Attribute name
+* @param lapl_id        IN: Link access property list
+*  
+* @return  A buffer(H5A_info_t) for Attribute information 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - obj_name is null.
+**/
+public synchronized static native H5A_info_t H5Aget_info_by_name(int loc_id, String obj_name, String attr_name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Aget_name retrieves the name of an attribute specified by the
+ * identifier, attr_id.
+ * 
+ * @param attr_id
+ *            IN: Identifier of the attribute.
+ * @param buf_size
+ *            IN: The size of the buffer to store the name in.
+ * @param name
+ *            OUT: Buffer to store name in.
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                JNI error writing back array
+ * @exception ArrayStoreException
+ *                JNI error writing back array
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ * @exception IllegalArgumentException
+ *                - buf_size <= 0.
+ * 
+ * @return the length of the attribute's name if successful.
+ **/
+public synchronized static native long H5Aget_name(int attr_id,
+        long buf_size, String[] name)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+        HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+public static long H5Aget_name(int attr_id, String[] name)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+        HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    long len = H5Aget_name(attr_id, 0, null);
+    
+    return H5Aget_name(attr_id, len+1, name);
+}
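+
+// A minimal sketch of the convenience overload above: it first queries the
+// name length and then fetches the name into the caller-supplied array.
+//
+//   String[] nameOut = new String[1];
+//   H5.H5Aget_name(attrId, nameOut);
+//   String attrName = nameOut[0];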
+
+/**
+* H5Aget_name_by_idx retrieves the name of an attribute that is attached to an object, which is specified by its location and name, 
+* loc_id and obj_name, respectively.
+* 
+* @param attr_id            IN: Attribute identifier 
+* @param obj_name           IN: Name of object to which attribute is attached, relative to location  
+* @param idx_type           IN: Type of index 
+* @param order              IN: Index traversal order  
+* @param n                  IN: Attribute's position in index
+* @param lapl_id            IN: Link access property list 
+* 
+* @return  String for Attribute name. 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - obj_name is null.
+**/
+public synchronized static native String H5Aget_name_by_idx(int attr_id, String obj_name, int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Aget_num_attrs returns the number of attributes attached to the object
+ * specified by its identifier, loc_id.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Oget_info(int)}
+ * 
+ * @param loc_id
+ *            IN: Identifier of a group, dataset, or named datatype.
+ * 
+ * @return the number of attributes if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+@Deprecated
+public synchronized static native int H5Aget_num_attrs(int loc_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Aget_space retrieves a copy of the dataspace for an attribute.
+ * 
+ * @param attr_id
+ *            IN: Identifier of an attribute.
+ * 
+ * @return attribute dataspace identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Aget_space(int attr_id) throws HDF5LibraryException
+{
+    int id = _H5Aget_space(attr_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Aget_space(int attr_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Aget_storage_size returns the amount of storage that is required for the specified attribute, attr_id.
+* 
+* @param attr_id            IN: Identifier of the attribute to query.
+* 
+* @return the amount of storage allocated for the attribute; otherwise returns 0 (zero)
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+**/
+public synchronized static native long H5Aget_storage_size(int attr_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Aget_type retrieves a copy of the datatype for an attribute.
+ * 
+ * @param attr_id
+ *            IN: Identifier of an attribute.
+ * 
+ * @return a datatype identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Aget_type(int attr_id) throws HDF5LibraryException
+{
+    int id = _H5Aget_type(attr_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Aget_type(int attr_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Aopen opens an existing attribute, attr_name, that is attached to an object specified by an object identifier, obj_id.
+ * 
+ * @param obj_id            IN: Identifier for object to which attribute is attached 
+ * @param attr_name         IN: Name of attribute to open  
+ * @param aapl_id           IN: Attribute access property list identifier 
+ * 
+ * @return  An attribute identifier if successful; otherwise returns a negative value. 
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - Name is null.
+ **/
+public static int H5Aopen(int obj_id, String attr_name, int aapl_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Aopen(obj_id, attr_name, aapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Aopen(int obj_id, String attr_name, int aapl_id) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Aopen_by_idx opens an existing attribute that is attached to an object specified by location and name, loc_id and obj_name, respectively.
+ * 
+ * @param loc_id            IN: Location of object to which attribute is attached  
+ * @param obj_name          IN: Name of object to which attribute is attached, relative to location  
+ * @param idx_type          IN: Type of index
+ * @param order             IN: Index traversal order  
+ * @param n                 IN: Attribute's position in index 
+ * @param aapl_id           IN: Attribute access property list 
+ * @param lapl_id           IN: Link access property list 
+ * 
+ * @return  An attribute identifier if successful; otherwise returns a negative value. 
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - Name is null.
+ **/
+public static int H5Aopen_by_idx(int loc_id, String obj_name, int idx_type, int order, long n, int aapl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Aopen_by_idx(loc_id, obj_name, idx_type, order, n, aapl_id, lapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Aopen_by_idx(int loc_id, String obj_name, int idx_type, int order, long n, int aapl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Aopen_by_name opens an attribute for an object by object name and attribute name.
+*
+*  @param loc_id             IN: Location from which to find object to which attribute is attached  
+*  @param obj_name           IN: Name of object to which attribute is attached, relative to loc_id 
+*  @param attr_name          IN: Name of attribute to open  
+*  @param aapl_id            IN: Attribute access property list 
+*  @param lapl_id            IN: Link access property list identifier 
+*
+*  @return Returns an attribute identifier if successful; otherwise returns a negative value. 
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - obj_name is null.
+**/
+public static int H5Aopen_by_name(int loc_id, String obj_name, String attr_name, int aapl_id, int lapl_id) 
+throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Aopen_by_name(loc_id, obj_name, attr_name, aapl_id, lapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+/**
+ * H5Aopen_idx opens an attribute which is attached to the object specified
+ * with loc_id. The location object may be either a group, dataset, or named
+ * datatype, all of which may have any sort of attribute.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Aopen_by_idx(int, String, int, int, long, int, int)}
+ * 
+ * @param loc_id
+ *            IN: Identifier of the group, dataset, or named datatype that the attribute is attached to
+ * @param idx
+ *            IN: Index of the attribute to open.
+ * 
+ * @return attribute identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+@Deprecated
+public static int H5Aopen_idx(int loc_id, int idx)
+        throws HDF5LibraryException
+{
+    int id = _H5Aopen_idx(loc_id, idx);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Aopen_idx(int loc_id, int idx)
+        throws HDF5LibraryException;
+
+/**
+ * H5Aopen_name opens an attribute specified by its name, name, which is
+ * attached to the object specified with loc_id.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Aopen_by_name(int, String, String, int, int)}
+ *  
+ * @param loc_id
+ *            IN: Identifier of a group, dataset, or named datatype that the attribute is attached to
+ * @param name
+ *            IN: Attribute name.
+ * 
+ * @return attribute identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+@Deprecated
+public static int H5Aopen_name(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Aopen_name(loc_id, name);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Aopen_name(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Aread reads an attribute, specified with attr_id. The attribute's
+ * memory datatype is specified with mem_type_id. The entire attribute is
+ * read into buf from the file.
+ * 
+ * @param attr_id
+ *            IN: Identifier of an attribute to read.
+ * @param mem_type_id
+ *            IN: Identifier of the attribute datatype (in memory).
+ * @param buf
+ *            IN: Buffer for data to be read.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data buffer is null.
+ **/
+public synchronized static native int H5Aread(int attr_id, int mem_type_id,
+        byte[] buf) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Aread reads an attribute, specified with attr_id. The attribute's
+ * memory datatype is specified with mem_type_id. The entire attribute is
+ * read into the data object from the file.
+ * 
+ * @param attr_id
+ *            IN: Identifier of an attribute to read.
+ * @param mem_type_id
+ *            IN: Identifier of the attribute datatype (in memory).
+ * @param obj
+ *            IN: Object for data to be read.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data buffer is null. See also
+ *                {@link #H5Aread(int, int, byte[])}.
+ **/
+public synchronized static int H5Aread(int attr_id, int mem_type_id,
+        Object obj) throws HDF5Exception, NullPointerException
+{
+    HDFArray theArray = new HDFArray(obj);
+    byte[] buf = theArray.emptyBytes();
+
+    // This will raise an exception if there is an error
+    int status = H5Aread(attr_id, mem_type_id, buf);
+
+    // No exception: status really ought to be OK
+    if (status >= 0) {
+        obj = theArray.arrayify(buf);
+    }
+
+    return status;
+}
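+
+// A minimal read sketch, assuming an attribute of native ints: the Object
+// overload lets HDFArray do the byte packing and unpacking.
+//
+//   int[] value = new int[1];
+//   H5.H5Aread(attrId, HDF5Constants.H5T_NATIVE_INT, value);
+//   // value[0] now holds the attribute's content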
+
+public synchronized static native int H5AreadVL(int attr_id,
+        int mem_type_id, String[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Arename changes the name of an attribute that is attached to the object specified by loc_id.
+* The attribute named old_attr_name is renamed new_attr_name.
+* 
+* @param loc_id         IN: Location or object identifier; may be dataset or group   
+* @param old_attr_name  IN: Prior attribute name 
+* @param new_attr_name  IN: New attribute name 
+* 
+* @return  A non-negative value if successful; otherwise returns a negative value. 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - Name is null.
+**/
+public synchronized static native int H5Arename(int loc_id, String old_attr_name, String new_attr_name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Arename_by_name changes the name of an attribute that is attached to the object specified by loc_id and obj_name.
+* The attribute named old_attr_name is renamed new_attr_name.
+* 
+* @param loc_id            IN: Location or object identifier; may be dataset or group   
+* @param obj_name          IN: Name of object, relative to location, whose attribute is to be renamed  
+* @param old_attr_name     IN: Prior attribute name 
+* @param new_attr_name     IN: New attribute name 
+* @param lapl_id           IN: Link access property list 
+* 
+* @return  A non-negative value if successful; otherwise returns a negative value. 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - Name is null.
+**/
+public synchronized static native int H5Arename_by_name(int loc_id, String obj_name, String old_attr_name, String new_attr_name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Awrite writes an attribute, specified with attr_id. The attribute's
+ * memory datatype is specified with mem_type_id. The entire attribute is
+ * written from buf to the file.
+ * 
+ * @param attr_id
+ *            IN: Identifier of an attribute to write.
+ * @param mem_type_id
+ *            IN: Identifier of the attribute datatype (in memory).
+ * @param buf
+ *            IN: Data to be written.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data is null.
+ **/
+public synchronized static native int H5Awrite(int attr_id,
+        int mem_type_id, byte[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Awrite writes an attribute, specified with attr_id. The attribute's
+ * memory datatype is specified with mem_type_id. The entire attribute is
+ * written from the data object to the file.
+ * 
+ * @param attr_id
+ *            IN: Identifier of an attribute to write.
+ * @param mem_type_id
+ *            IN: Identifier of the attribute datatype (in memory).
+ * @param obj
+ *            IN: Data object to be written.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data object is null. See also
+ *                {@link #H5Awrite(int, int, byte[])}.
+ **/
+public synchronized static int H5Awrite(int attr_id, int mem_type_id,
+        Object obj) throws HDF5Exception, NullPointerException
+{
+    HDFArray theArray = new HDFArray(obj);
+    byte[] buf = theArray.byteify();
+
+    int retVal = H5Awrite(attr_id, mem_type_id, buf);
+    buf = null;
+    theArray = null;
+    return retVal;
+}
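+
+// The matching write sketch for the Object overload above (attrId and the
+// native-int assumption as in the read sketch):
+//
+//   int[] value = new int[] { 42 };
+//   H5.H5Awrite(attrId, HDF5Constants.H5T_NATIVE_INT, value);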
+
+public synchronized static native int H5AwriteVL(int attr_id,
+        int mem_type_id, String[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+///////// unimplemented ////////
+//herr_t  H5Aiterate2(hid_t loc_id, H5_index_t idx_type, H5_iter_order_t order, hsize_t *idx, H5A_operator2_t op, void *op_data);
+//herr_t  H5Aiterate_by_name(hid_t loc_id, const char *obj_name, H5_index_t idx_type,
+//                H5_iter_order_t order, hsize_t *idx, H5A_operator2_t op, void *op_data, hid_t lapd_id);
+
+
+//////////////////////////////////////////////////////////////
+////
+//H5D: Datasets Interface Functions //
+////
+//////////////////////////////////////////////////////////////
+
+public synchronized static native int H5Dchdir_ext(String dir_name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Dcopy copies the content of one dataset to another dataset.
+ * 
+ * @param src_did
+ *            the identifier of the source dataset
+ * @param dst_did
+ *            the identifier of the destination dataset
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ */
+public synchronized static native int H5Dcopy(int src_did, int dst_did)
+        throws HDF5LibraryException;
+
+/**
+ * H5Dclose ends access to a dataset specified by dataset_id and releases
+ * resources used by it.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset to finish access to.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Dclose(int dataset_id) throws HDF5LibraryException
+{
+    if (dataset_id < 0)
+        return 0; // throw new HDF5LibraryException("Negative ID");
+    
+    OPEN_IDS.removeElement(dataset_id);
+    return _H5Dclose(dataset_id);
+}
+
+private synchronized static native int _H5Dclose(int dataset_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Dcreate creates a data set with a name, name, in the file or in the
+* group specified by the identifier loc_id.
+*
+* @deprecated As of HDF5 1.8, replaced by {@link #H5Dcreate(int, String, int, int, int, int, int) }
+* 
+* @param loc_id
+*            Identifier of the file or group to create the dataset within.
+* @param name
+*            The name of the dataset to create.
+* @param type_id
+*            Identifier of the datatype to use when creating the dataset.
+* @param space_id
+*            Identifier of the dataspace to use when creating the dataset.
+* @param create_plist_id
+*            Identifier of the dataset creation property list.
+* 
+* @return a dataset identifier if successful
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+* @exception NullPointerException
+*                - name is null.
+**/
+@Deprecated
+public static int H5Dcreate(int loc_id, String name, int type_id,
+     int space_id, int create_plist_id)
+     throws HDF5LibraryException, NullPointerException
+{
+ int id = _H5Dcreate(loc_id, name, type_id, space_id, create_plist_id);
+ if (id > 0)
+     OPEN_IDS.addElement(id);
+ return id;
+}
+
+private synchronized static native int _H5Dcreate(int loc_id, String name,
+     int type_id, int space_id, int create_plist_id)
+     throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Dcreate creates a new dataset named name at the 
+ *  location specified by loc_id.
+ *
+ *  @param loc_id   IN: Location identifier 
+ *  @param name     IN: Dataset name
+ *  @param type_id  IN: Datatype identifier
+ *  @param space_id IN: Dataspace identifier 
+ *  @param lcpl_id  IN: Identifier of link creation property list.
+ *  @param dcpl_id  IN: Identifier of dataset creation property list.
+ *  @param dapl_id  IN: Identifier of dataset access property list.
+ *
+ *  @return a dataset identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public static int H5Dcreate(int loc_id, String name, int type_id,
+        int space_id, int lcpl_id, int dcpl_id, int dapl_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Dcreate2(loc_id, name, type_id, space_id, lcpl_id, dcpl_id, dapl_id);
+    if (id > 0)
+      OPEN_IDS.addElement(id);
+    return id;
+}
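+
+// Usage sketch: create a 2-D native-int dataset with default property lists;
+// assumes file_id is an open file and relies on the H5Screate_simple/H5Sclose
+// wrappers declared elsewhere in this class.
+//
+//     int space_id = H5.H5Screate_simple(2, new long[] { 4, 6 }, null);
+//     int dset_id = H5.H5Dcreate(file_id, "/dset", HDF5Constants.H5T_NATIVE_INT,
+//             space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+//             HDF5Constants.H5P_DEFAULT);
+//     H5.H5Sclose(space_id);
+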
+/**
+ *  H5Dcreate2 creates a new dataset named name at the 
+ *  location specified by loc_id.
+ *
+ *  @see #H5Dcreate(int, String, int, int, int, int, int)
+ **/
+private synchronized static native int _H5Dcreate2(int loc_id, String name, int type_id,
+        int space_id, int lcpl_id, int dcpl_id, int dapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Dcreate_anon creates a dataset in the file specified by loc_id. 
+ *
+ *  @param loc_id   IN: Location identifier 
+ *  @param type_id  IN: Datatype identifier
+ *  @param space_id IN: Dataspace identifier 
+ *  @param dcpl_id  IN: Identifier of dataset creation property list.
+ *  @param dapl_id  IN: Identifier of dataset access property list.
+ *
+ *  @return a dataset identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Dcreate_anon(int loc_id, int type_id, int space_id,
+        int dcpl_id, int dapl_id)
+        throws HDF5LibraryException
+{
+    int id = _H5Dcreate_anon(loc_id, type_id, space_id, dcpl_id, dapl_id);
+    if (id > 0)
+      OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Dcreate_anon(int loc_id, int type_id, int space_id,
+        int dcpl_id, int dapl_id)
+    throws HDF5LibraryException;
+
+/**
+ * H5Dextend verifies that the dataset is at least of size size, extending it if necessary.
+ * 
+ * @param dataset_id IN: Identifier of the dataset.
+ * @param size       IN: Array containing the new magnitude of each dimension.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - size array is null.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Dset_extent(int, long[]) }
+ **/
+@Deprecated
+public synchronized static native int H5Dextend(int dataset_id, byte[] size)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Dextend verifies that the dataset is at least of size size, extending it if necessary.
+ * 
+ * @param dataset_id IN: Identifier of the dataset.
+ * @param size       IN: Array containing the new magnitude of each dimension.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - size array is null.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Dset_extent(int, long[]) }
+ **/
+@Deprecated
+public synchronized static int H5Dextend(int dataset_id, long[] size)
+        throws HDF5Exception, NullPointerException
+{
+    int rval = -1;
+    HDFArray theArray = new HDFArray(size);
+    byte[] buf = theArray.byteify();
+    rval = H5Dextend(dataset_id, buf);
+    buf = null;
+    theArray = null;
+    return rval;
+}
+
+/**
+ *  H5Dfill explicitly fills the dataspace selection in memory, space_id, 
+ *  with the fill value specified in fill. 
+ *
+ *  @param fill      IN: Pointer to the fill value to be used.
+ *  @param fill_type IN: Fill value datatype identifier.
+ *  @param buf   IN/OUT: Pointer to the memory buffer containing the selection to be filled.
+ *  @param buf_type  IN: Datatype of dataspace elements to be filled.
+ *  @param space     IN: Dataspace describing memory buffer and containing the selection to be filled.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native void H5Dfill(byte[] fill, int fill_type, byte[] buf, int buf_type, int space)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Dget_access_plist returns an identifier for a copy of the
+ *  dataset access property list for a dataset.
+ *
+ *  @param dset_id IN: Identifier of the dataset to query.
+ *
+ *  @return a dataset access property list identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Dget_access_plist(int dset_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Dget_create_plist returns an identifier for a copy of the dataset
+ * creation property list for a dataset.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset to query.
+ * @return a dataset creation property list identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Dget_create_plist(int dataset_id)
+        throws HDF5LibraryException
+{
+    int id = _H5Dget_create_plist(dataset_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Dget_create_plist(int dataset_id)
+        throws HDF5LibraryException;
+
+/** H5Dget_offset returns the address in the file of the dataset dset_id.
+ *
+ *  @param dset_id  IN: Identifier of the dataset in question
+ *
+ *  @return the offset in bytes.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Dget_offset(int dset_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Dget_space returns an identifier for a copy of the dataspace for a
+ * dataset.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset to query.
+ * 
+ * @return a dataspace identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Dget_space(int dataset_id) throws HDF5LibraryException
+{
+    int id = _H5Dget_space(dataset_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Dget_space(int dataset_id)
+        throws HDF5LibraryException;
+
+/**
+ *  H5Dget_space_status determines whether space has been 
+ *  allocated for the dataset dset_id. 
+ *
+ *  @param dset_id IN: Identifier of the dataset to query.
+ *
+ *  @return the space allocation status
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Dget_space_status(int dset_id)
+        throws HDF5LibraryException;
+
+/**
+ *  H5Dget_space_status determines whether space has been 
+ *  allocated for the dataset dset_id. 
+ *
+ *  @param dset_id IN: Identifier of the dataset to query.
+ *
+ *  @return the space allocation status
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Dget_space_status(int dset_id,
+        int[] status) throws HDF5LibraryException, NullPointerException
+{
+    return _H5Dget_space_status(dset_id, status);
+}
+
+private synchronized static native int _H5Dget_space_status(int dset_id,
+        int[] status) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Dget_storage_size returns the amount of storage that is required for
+ * the dataset.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset in question
+ * 
+ * @return the amount of storage space allocated for the dataset.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Dget_storage_size(int dataset_id)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+ * H5Dget_type returns an identifier for a copy of the datatype for a
+ * dataset.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset to query.
+ * 
+ * @return a datatype identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Dget_type(int dataset_id) throws HDF5LibraryException
+{
+    int id = _H5Dget_type(dataset_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Dget_type(int dataset_id)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Dgetdir_ext(String[] dir_name,
+        int size) throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Diterate iterates over all the data elements in the memory buffer buf, 
+ *  executing the callback function operator once for each such data element. 
+ *
+ *  @param buf     IN/OUT: Pointer to the memory containing the elements to iterate over.
+ *  @param buf_type    IN: Buffer datatype identifier.
+ *  @param space       IN: Dataspace describing memory buffer.
+ *  @param op          IN: Callback function to operate on each value.
+ *  @param op_data IN/OUT: Pointer to any user-defined data for use by the operator function.
+ *
+ *  @return  the return value of the first operator that returns a non-zero value, or zero if all members were 
+ *           processed with no operator returning non-zero.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native int H5Diterate(byte[] buf, int buf_type, int space,
+        H5D_iterate_cb op, H5D_iterate_t op_data)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Dopen opens the existing dataset specified by a location identifier 
+ *  and name, loc_id and name, respectively.
+ *
+ *  @deprecated As of HDF5 1.8, replaced by {@link #H5Dopen(int, String, int) }
+ *
+ *  @param loc_id   IN: Location identifier 
+ *  @param name     IN: Dataset name
+ *
+ *  @return a dataset identifier if successful
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+@Deprecated
+public static int H5Dopen(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Dopen(loc_id, name);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Dopen(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Dopen opens the existing dataset specified by a location identifier 
+ *  and name, loc_id and name, respectively.
+ *
+ *  @param loc_id   IN: Location identifier 
+ *  @param name     IN: Dataset name
+ *  @param dapl_id  IN: Identifier of dataset access property list.
+ *
+ *  @return a dataset identifier if successful
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public static int H5Dopen(int loc_id, String name, int dapl_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Dopen2(loc_id, name, dapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+/**
+ *  H5Dopen2 opens the existing dataset specified by a location identifier 
+ *  and name, loc_id and name, respectively.
+ *
+ *  @see #H5Dopen(int, String, int)
+ **/
+private synchronized static native int _H5Dopen2(int loc_id, String name, int dapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Dread reads a (partial) dataset, specified by its identifier
+ * dataset_id, from the file into the application memory buffer buf.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset read from.
+ * @param mem_type_id
+ *            Identifier of the memory datatype.
+ * @param mem_space_id
+ *            Identifier of the memory dataspace.
+ * @param file_space_id
+ *            Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ *            Identifier of a transfer property list for this I/O operation.
+ * @param buf
+ *            Buffer to store data read from the file.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data buffer is null.
+ **/
+public synchronized static native int H5Dread(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, byte[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dread(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, byte[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id,
+            xfer_plist_id, buf, true);
+}
+
+public synchronized static int H5Dread(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, Object obj)
+        throws HDF5Exception, HDF5LibraryException, NullPointerException
+{
+    return H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id,
+            xfer_plist_id, obj, true);
+}
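+
+// Usage sketch: the convenience overloads above default isCriticalPinning to
+// true, so a whole dataset can be read into a primitive array in one call;
+// assumes dset_id refers to a 24-element integer dataset.
+//
+//     int[] data = new int[24];
+//     H5.H5Dread(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+//             HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);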
+
+/**
+ * H5Dread reads a (partial) dataset, specified by its identifier
+ * dataset_id, from the file into the application data object.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset read from.
+ * @param mem_type_id
+ *            Identifier of the memory datatype.
+ * @param mem_space_id
+ *            Identifier of the memory dataspace.
+ * @param file_space_id
+ *            Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ *            Identifier of a transfer property list for this I/O operation.
+ * @param obj
+ *            Object to store data read from the file.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5Exception
+ *                - Failure in the data conversion.
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data object is null.
+ **/
+public synchronized static int H5Dread(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, Object obj,
+        boolean isCriticalPinning)
+        throws HDF5Exception, HDF5LibraryException, NullPointerException
+{
+    int status = -1;
+    boolean is1D = false;
+
+    Class<? extends Object> dataClass = obj.getClass();
+    if (!dataClass.isArray()) {
+        throw (new HDF5JavaException("H5Dread: data is not an array"));
+    }
+
+    String cname = dataClass.getName();
+    is1D = (cname.lastIndexOf('[') == cname.indexOf('['));
+    char dname = cname.charAt(cname.lastIndexOf("[") + 1);
+
+    if (is1D && (dname == 'B')) {
+        status = H5Dread(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (byte[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'S')) {
+        status = H5Dread_short(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (short[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'I')) {
+        status = H5Dread_int(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (int[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'J')) {
+        status = H5Dread_long(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (long[]) obj);
+    }
+    else if (is1D && (dname == 'F')) {
+        status = H5Dread_float(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (float[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'D')) {
+        status = H5Dread_double(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (double[]) obj,
+                isCriticalPinning);
+    }
+    else if (H5.H5Tequal(mem_type_id, HDF5Constants.H5T_STD_REF_DSETREG)) {
+        status = H5Dread_reg_ref(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (String[]) obj);
+    }
+    else if (is1D && (dataClass.getComponentType() == String.class)) {
+        // Rosetta Biosoftware - add support for
+        // Strings (variable length)
+        if (H5.H5Tis_variable_str(mem_type_id)) {
+            status = H5DreadVL(dataset_id, mem_type_id, mem_space_id,
+                    file_space_id, xfer_plist_id, (Object[]) obj);
+        }
+        else {
+            status = H5Dread_string(dataset_id, mem_type_id, mem_space_id,
+                    file_space_id, xfer_plist_id, (String[]) obj);
+        }
+    }
+    else {
+        // Create a data buffer to hold the data
+        // into a Java Array
+        HDFArray theArray = new HDFArray(obj);
+        byte[] buf = theArray.emptyBytes();
+
+        // will raise exception if read fails
+        status = H5Dread(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, buf, isCriticalPinning);
+        if (status >= 0) {
+            // convert the data into a Java
+            // Array */
+            obj = theArray.arrayify(buf);
+        }
+
+        // clean up these: assign 'null' as hint
+        // to gc() */
+        buf = null;
+        theArray = null;
+    }
+
+    return status;
+}
+
+public synchronized static native int H5Dread_double(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, double[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dread_double(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, double[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dread_double(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dread_float(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, float[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dread_float(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, float[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dread_float(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dread_int(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, int[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dread_int(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, int[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dread_int(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dread_long(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, long[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dread_long(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, long[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dread_long(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dread_reg_ref(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, String[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Dread_reg_ref_data(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, String[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Dread_short(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, short[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dread_short(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, short[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dread_short(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dread_string(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, String[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5DreadVL(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, Object[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Dset_extent sets the current dimensions of the chunked dataset dset_id 
+ *  to the sizes specified in size. 
+ *
+ *  @param dset_id  IN: Chunked dataset identifier.
+ *  @param size     IN: Array containing the new magnitude of each dimension of the dataset. 
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - size is null.
+ **/
+public synchronized static native void H5Dset_extent(int dset_id, long size[])
+        throws HDF5LibraryException, NullPointerException;
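+
+// Usage sketch: grow a 2-D chunked dataset to 200 x 100 elements; assumes
+// dset_id is chunked and its maximum dimensions permit the new extent.
+//
+//     H5.H5Dset_extent(dset_id, new long[] { 200, 100 });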
+
+private synchronized static native int _H5Aopen_by_name(int loc_id, String obj_name, String attr_name,int aapl_id, int lapl_id)
+throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Dvlen_get_buf_size(int dataset_id,
+        int type_id, int space_id, int[] size) throws HDF5LibraryException;
+
+/**
+ *  H5Dvlen_get_buf_size determines the number of bytes required to store the VL data from 
+ *  the dataset, using the space_id for the selection in the dataset on disk and the 
+ *  type_id for the memory representation of the VL data in memory. 
+ *
+ *  @param dset_id  IN: Identifier of the dataset read from.
+ *  @param type_id  IN: Identifier of the datatype.
+ *  @param space_id IN: Identifier of the dataspace.
+ *
+ *  @return the size in bytes of the memory buffer required to store the VL data.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native long H5Dvlen_get_buf_size_long(int dset_id, int type_id, int space_id)
+        throws HDF5LibraryException;
+//int H5Dvlen_get_buf_size(int dset_id, int type_id, int space_id, LongByReference size);
+
+/**
+ * H5Dvlen_reclaim reclaims the memory that the HDF-5 library allocated to
+ * store variable-length data in the buffer buf.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - buf is null.
+ **/
+public synchronized static native int H5Dvlen_reclaim(int type_id,
+        int space_id, int xfer_plist_id, byte[] buf)
+        throws HDF5LibraryException, NullPointerException;
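+
+// Usage sketch: size a buffer for variable-length data before reading, then let
+// the library release the VL memory it allocated; assumes dset_id, type_id and
+// space_id describe an open VL dataset, its memory datatype and its dataspace.
+//
+//     long nbytes = H5.H5Dvlen_get_buf_size_long(dset_id, type_id, space_id);
+//     byte[] vlbuf = new byte[(int) nbytes];
+//     // ... read into vlbuf, then:
+//     H5.H5Dvlen_reclaim(type_id, space_id, HDF5Constants.H5P_DEFAULT, vlbuf);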
+
+/**
+ * H5Dwrite writes a (partial) dataset, specified by its identifier
+ * dataset_id, from the application memory buffer buf into the file.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset read from.
+ * @param mem_type_id
+ *            Identifier of the memory datatype.
+ * @param mem_space_id
+ *            Identifier of the memory dataspace.
+ * @param file_space_id
+ *            Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ *            Identifier of a transfer property list for this I/O operation.
+ * @param buf
+ *            Buffer with data to be written to the file.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - buf is null.
+ **/
+public synchronized static native int H5Dwrite(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, byte[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dwrite(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, byte[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id,
+            xfer_plist_id, buf, true);
+}
+
+public synchronized static int H5Dwrite(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, Object obj)
+        throws HDF5Exception, HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id,
+            xfer_plist_id, obj, true);
+}
+
+/**
+ * H5Dwrite writes a (partial) dataset, specified by its identifier
+ * dataset_id, from the application memory data object into the file.
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset read from.
+ * @param mem_type_id
+ *            Identifier of the memory datatype.
+ * @param mem_space_id
+ *            Identifier of the memory dataspace.
+ * @param file_space_id
+ *            Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ *            Identifier of a transfer property list for this I/O operation.
+ * @param obj
+ *            Object with data to be written to the file.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5Exception
+ *                - Failure in the data conversion.
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - data object is null.
+ **/
+public synchronized static int H5Dwrite(int dataset_id, int mem_type_id,
+        int mem_space_id, int file_space_id, int xfer_plist_id, Object obj,
+        boolean isCriticalPinning)
+        throws HDF5Exception, HDF5LibraryException, NullPointerException
+{
+    int status = -1;
+    boolean is1D = false;
+
+    Class<? extends Object> dataClass = obj.getClass();
+    if (!dataClass.isArray()) {
+        throw (new HDF5JavaException("H5Dread: data is not an array"));
+    }
+
+    String cname = dataClass.getName();
+    is1D = (cname.lastIndexOf('[') == cname.indexOf('['));
+    char dname = cname.charAt(cname.lastIndexOf("[") + 1);
+
+    if (is1D && (dname == 'B')) {
+        status = H5Dwrite(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (byte[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'S')) {
+        status = H5Dwrite_short(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (short[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'I')) {
+        status = H5Dwrite_int(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (int[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'J')) {
+        status = H5Dwrite_long(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (long[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'F')) {
+        status = H5Dwrite_float(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (float[]) obj,
+                isCriticalPinning);
+    }
+    else if (is1D && (dname == 'D')) {
+        status = H5Dwrite_double(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (double[]) obj,
+                isCriticalPinning);
+    }
+
+    // Rosetta Biosoftware - call into H5DwriteString
+    // for variable length Strings
+    else if ((H5.H5Tget_class(mem_type_id) == HDF5Constants.H5T_STRING)
+            && H5.H5Tis_variable_str(mem_type_id) && dataClass.isArray()
+            && (dataClass.getComponentType() == String.class) && is1D) {
+        status = H5DwriteString(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, (String[]) obj);
+
+    }
+    else {
+        HDFArray theArray = new HDFArray(obj);
+        byte[] buf = theArray.byteify();
+
+        /* will raise exception on error */
+        status = H5Dwrite(dataset_id, mem_type_id, mem_space_id,
+                file_space_id, xfer_plist_id, buf, isCriticalPinning);
+
+        // clean up these: assign 'null' as hint to
+        // gc() */
+        buf = null;
+        theArray = null;
+    }
+
+    return status;
+}
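+
+// Usage sketch: 1-D primitive arrays are dispatched to the typed native writers
+// above, so a double[] is written without manual byteification; assumes dset_id
+// is a 3-element dataset of native doubles.
+//
+//     double[] values = { 0.5, 1.5, 2.5 };
+//     H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_DOUBLE,
+//             HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+//             HDF5Constants.H5P_DEFAULT, values);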
+
+public synchronized static native int H5Dwrite_double(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, double[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dwrite_double(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, double[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite_double(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dwrite_float(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, float[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dwrite_float(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, float[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite_float(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dwrite_int(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, int[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dwrite_int(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, int[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite_int(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dwrite_long(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, long[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dwrite_long(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, long[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite_long(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+public synchronized static native int H5Dwrite_short(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, short[] buf, boolean isCriticalPinning)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Dwrite_short(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, short[] buf)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Dwrite_short(dataset_id, mem_type_id, mem_space_id,
+            file_space_id, xfer_plist_id, buf, true);
+}
+
+/**
+ * H5DwriteString writes a (partial) variable length String dataset,
+ * specified by its identifier dataset_id, from the application memory
+ * buffer buf into the file.
+ * 
+ * ---- contributed by Rosetta Biosoftware
+ * 
+ * @param dataset_id
+ *            Identifier of the dataset read from.
+ * @param mem_type_id
+ *            Identifier of the memory datatype.
+ * @param mem_space_id
+ *            Identifier of the memory dataspace.
+ * @param file_space_id
+ *            Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ *            Identifier of a transfer property list for this I/O operation.
+ * @param buf
+ *            Buffer with data to be written to the file.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - buf is null.
+ **/
+public synchronized static native int H5DwriteString(int dataset_id,
+        int mem_type_id, int mem_space_id, int file_space_id,
+        int xfer_plist_id, String[] buf)
+        throws HDF5LibraryException, NullPointerException;
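+
+// Usage sketch: write three variable-length strings; assumes dset_id was
+// created with a variable-length string datatype and mem_type_id is that
+// datatype (e.g. a copy of H5T_C_S1 resized to H5T_VARIABLE).
+//
+//     String[] rows = { "alpha", "beta", "gamma" };
+//     H5.H5DwriteString(dset_id, mem_type_id, HDF5Constants.H5S_ALL,
+//             HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, rows);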
+
+///////// unimplemented ////////
+//herr_t H5Dgather(hid_t src_space_id, const void *src_buf, hid_t type_id,
+//                 size_t dst_buf_size, void *dst_buf, H5D_gather_func_t op, void *op_data);
+//herr_t H5Dscatter(H5D_scatter_func_t op, void *op_data, hid_t type_id, hid_t dst_space_id, void *dst_buf);
+
+
+//////////////////////////////////////////////////////////////
+////
+//H5E: Error Stack //
+////
+//////////////////////////////////////////////////////////////
+
+/**
+* H5Eauto_is_v2 determines whether the error auto reporting function for an
+* error stack conforms to the H5E_auto2_t typedef or the H5E_auto1_t
+* typedef.
+* 
+* @param stack_id
+*            IN: Error stack identifier.
+* 
+* @return boolean true if the error stack conforms to H5E_auto2_t and false
+*         if it conforms to H5E_auto1_t.
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+**/
+public synchronized static native boolean H5Eauto_is_v2(int stack_id)
+     throws HDF5LibraryException;
+
+/**
+* H5Eclear clears the error stack for the current thread. H5Eclear can fail
+* if there are problems initializing the library.
+* <p>
+* This may be used by exception handlers to assure that the error condition
+* in the HDF-5 library has been reset.
+* 
+* @return Returns a non-negative value if successful
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+**/
+public static int H5Eclear() throws HDF5LibraryException
+{
+ H5Eclear2(HDF5Constants.H5E_DEFAULT);
+ return 0;
+}
+
+/**
+ * H5Eclear clears the error stack specified by estack_id, or, if estack_id
+ * is set to H5E_DEFAULT, the error stack for the current thread.
+ * 
+ * @param stack_id
+ *            IN: Error stack identifier.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static void H5Eclear(int stack_id) throws HDF5LibraryException
+{
+    H5Eclear2(stack_id);
+}
+
+/**
+ * H5Eclear2 clears the error stack specified by estack_id, or, if estack_id
+ * is set to H5E_DEFAULT, the error stack for the current thread.
+ * 
+ * @see #H5Eclear
+ **/
+public synchronized static native void H5Eclear2(int stack_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Eclose_msg closes an error message identifier, which can be either a
+ * major or minor message.
+ * 
+ * @param err_id
+ *            IN: Error message identifier.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Eclose_msg(int err_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Eclose_stack closes the object handle for an error stack and releases
+ * its resources.
+ * 
+ * @param stack_id
+ *            IN: Error stack identifier.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Eclose_stack(int stack_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Ecreate_msg adds an error message to an error class defined by client
+ * library or application program.
+ * 
+ * @param cls_id
+ *            IN: Error class identifier.
+ * @param msg_type
+ *            IN: The type of the error message.
+ * @param msg
+ *            IN: The error message.
+ * 
+ * @return a message identifier
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - msg is null.
+ **/
+public synchronized static native int H5Ecreate_msg(int cls_id,
+        int msg_type, String msg)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Ecreate_stack creates a new empty error stack and returns the new
+ * stack's identifier.
+ * 
+ * @return an error stack identifier
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Ecreate_stack()
+        throws HDF5LibraryException;
+
+/**
+ * H5Eget_class_name retrieves the name of the error class specified by the
+ * class identifier.
+ * 
+ * @param class_id
+ *            IN: Error class identifier.
+ * 
+ * @return the name of the error class
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native String H5Eget_class_name(int class_id)
+        throws HDF5LibraryException, NullPointerException;
+// long H5Eget_class_name(int class_id, String name, IntegerType size);
+
+/**
+ * H5Eget_current_stack copies the current error stack and returns an error
+ * stack identifier for the new copy.
+ * 
+ * @return an error stack identifier
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Eget_current_stack()
+        throws HDF5LibraryException;
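+
+// Usage sketch: snapshot the current error stack, inspect its depth, and
+// release the copy again (all three calls are declared in this section).
+//
+//     int stk_id = H5.H5Eget_current_stack();
+//     long nrecords = H5.H5Eget_num(stk_id);
+//     H5.H5Eclose_stack(stk_id);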
+
+/**
+ * H5Eset_current_stack replaces the content of the current error stack with
+ * a copy of the content of the error stack specified by estack_id.
+ * 
+ * @param stack_id
+ *            IN: Error stack identifier.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Eset_current_stack(int stack_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Eget_msg retrieves the error message including its length and type.
+ * 
+ * @param msg_id
+ *            IN: Error message identifier.
+ * @param type_list
+ *            OUT: The type of the error message. Valid values are H5E_MAJOR
+ *            and H5E_MINOR.
+ * 
+ * @return the error message
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native String H5Eget_msg(int msg_id,
+        int[] type_list) throws HDF5LibraryException;
+// long H5Eget_msg(int msg_id, H5E_TYPE type, String msg, IntegerType size);
+
+/**
+ * H5Eget_num retrieves the number of error records in the error stack
+ * specified by estack_id (including major, minor messages and description).
+ * 
+ * @param stack_id
+ *            IN: Error stack identifier.
+ * 
+ * @return the number of error messages
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Eget_num(int stack_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Eprint1 prints the current error stack on the specified stream,
+ * stream.
+ * 
+ * @param stream
+ *            IN: File pointer, or stderr if null.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Eprint2(int, Object)}
+ **/
+@Deprecated
+public synchronized static native void H5Eprint1(Object stream)
+        throws HDF5LibraryException;
+
+/**
+ * H5Eprint2 prints the error stack specified by estack_id on the specified
+ * stream, stream.
+ * 
+ * @param stack_id
+ *            IN: Error stack identifier. If the identifier is H5E_DEFAULT,
+ *            the current error stack will be printed.
+ * @param stream
+ *            IN: File pointer, or stderr if null.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Eprint2(int stack_id,
+        Object stream) throws HDF5LibraryException;
+
+/**
+* H5Epop deletes the number of error records specified in count from the
+* top of the error stack specified by estack_id (including major, minor
+* messages and description).
+* 
+* @param stack_id
+*            IN: Error stack identifier.
+* @param count
+*            IN: The number of error records to be deleted from the top of
+*            the error stack.
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+**/
+public synchronized static native void H5Epop(int stack_id, long count)
+     throws HDF5LibraryException;
+
+/**
+ * H5Eregister_class registers a client library or application program to
+ * the HDF5 error API so that the client library or application program can
+ * report errors together with HDF5 library.
+ * 
+ * @param cls_name
+ *            IN: Name of the error class.
+ * @param lib_name
+ *            IN: Name of the client library or application to which the
+ *            error class belongs.
+ * @param version
+ *            IN: Version of the client library or application to which the
+ *            error class belongs.
+ * 
+ * @return a class identifier
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native int H5Eregister_class(String cls_name,
+        String lib_name, String version)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Eunregister_class removes the error class specified by class_id.
+ * 
+ * @param class_id
+ *            IN: Error class identifier.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Eunregister_class(int class_id)
+        throws HDF5LibraryException;
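+
+// Usage sketch: register an application error class, add one major message, and
+// tear both down again; the class, library and version strings are arbitrary.
+//
+//     int cls_id = H5.H5Eregister_class("AppErrors", "my-app", "1.0");
+//     int msg_id = H5.H5Ecreate_msg(cls_id, HDF5Constants.H5E_MAJOR, "I/O failed");
+//     H5.H5Eclose_msg(msg_id);
+//     H5.H5Eunregister_class(cls_id);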
+
+///////// unimplemented ////////
+//public interface H5E_auto2_t extends Callback
+//{
+//    int callback(int estack, Pointer client_data);
+//}
+
+//int H5Eget_auto(int estack_id, H5E_auto2_t func, PointerByReference client_data);
+//{
+//    return H5Eget_auto2(estack_id, func, client_data);
+//}
+//int H5Eget_auto2(int estack_id, H5E_auto2_t func, PointerByReference client_data);
+
+//int H5Eset_auto(int estack_id, H5E_auto2_t func, Pointer client_data);
+//{
+//    return H5Eset_auto2(estack_id, func, client_data);
+//}
+//int H5Eset_auto2(int estack_id, H5E_auto2_t func, Pointer client_data);
+
+
+//public static int H5Epush(int err_stack, String file, String func, int line,
+//              int cls_id, int maj_id, int min_id, String msg, ...)
+//{
+//    H5Epush2(err_stack, file, func, line, cls_id, maj_id, min_id, msg, ...);
+//}
+//public synchronized static native int H5Epush2(int err_stack, String file, String func, int line,
+//              int cls_id, int maj_id, int min_id, String msg, ...);
+
+////Error stack traversal callback function pointers
+//public interface H5E_walk2_t extends Callback
+//{
+//    int callback(int n, H5E_error2_t err_desc, Pointer client_data);
+//}
+
+//int H5Ewalk(int err_stack, H5E_direction_t direction, H5E_walk2_t func, Pointer client_data)
+//{
+//    return H5Ewalk2(err_stack, direction, func, client_data);
+//}
+//int H5Ewalk2(int err_stack, H5E_direction_t direction, H5E_walk2_t func, Pointer client_data);
+
+
+//////////////////////////////////////////////////////////////
+////
+//H5F: File Interface Functions //
+////
+//////////////////////////////////////////////////////////////
+
+/**
+ * H5Fclose terminates access to an HDF5 file.
+ * 
+ * @param file_id
+ *            Identifier of a file to terminate access to.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Fclose(int file_id) throws HDF5LibraryException
+{
+    if (file_id <0)
+    	return 0; // throw new HDF5LibraryException("Negative ID");;
+    
+    OPEN_IDS.removeElement(file_id);
+    return _H5Fclose(file_id);
+}
+
+private synchronized static native int _H5Fclose(int file_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Fopen opens an existing file and is the primary function for accessing
+* existing HDF5 files.
+* 
+* @param name
+*            Name of the file to access.
+* @param flags
+*            File access flags.
+* @param access_id
+*            Identifier for the file access properties list.
+* 
+* @return a file identifier if successful
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+* @exception NullPointerException
+*                - name is null.
+**/
+public static int H5Fopen(String name, int flags, int access_id)
+     throws HDF5LibraryException, NullPointerException
+{
+ int id = _H5Fopen(name, flags, access_id);
+ if (id > 0)
+     OPEN_IDS.addElement(id);
+ return id;
+}
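+
+// Usage sketch: open an existing file read-only with default access properties
+// and close it again; the file name is illustrative.
+//
+//     int file_id = H5.H5Fopen("data.h5", HDF5Constants.H5F_ACC_RDONLY,
+//             HDF5Constants.H5P_DEFAULT);
+//     H5.H5Fclose(file_id);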
+
+private synchronized static native int _H5Fopen(String name, int flags,
+     int access_id) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Freopen reopens an HDF5 file.
+ * 
+ * @param file_id
+ *            Identifier of a file to terminate and reopen access to.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @return a new file identifier if successful
+ **/
+public static int H5Freopen(int file_id) throws HDF5LibraryException
+{
+    int id = _H5Freopen(file_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Freopen(int file_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Fcreate is the primary function for creating HDF5 files.
+ * 
+ * @param name
+ *            Name of the file to access.
+ * @param flags
+ *            File access flags. Possible values include:
+ *            <UL>
+ *            <LI>
+ *            H5F_ACC_RDWR Allow read and write access to file.</LI>
+ *            <LI>
+ *            H5F_ACC_RDONLY Allow read-only access to file.</LI>
+ *            <LI>
+ *            H5F_ACC_TRUNC Truncate file, if it already exists, erasing all
+ *            data previously stored in the file.</LI>
+ *            <LI>
+ *            H5F_ACC_EXCL Fail if file already exists.</LI>
+ *            <LI>
+ *            H5F_ACC_DEBUG Print debug information.</LI>
+ *            <LI>
+ *            H5P_DEFAULT Apply default file access and creation properties.
+ *            </LI>
+ *            </UL>
+ * 
+ * @param create_id
+ *            File creation property list identifier, used when modifying
+ *            default file meta-data. Use H5P_DEFAULT for default access
+ *            properties.
+ * @param access_id
+ *            File access property list identifier. If parallel file access
+ *            is desired, this is a collective call according to the
+ *            communicator stored in the access_id (not supported in Java).
+ *            Use H5P_DEFAULT for default access properties.
+ * 
+ * @return a file identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public static int H5Fcreate(String name, int flags, int create_id,
+        int access_id) throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Fcreate(name, flags, create_id, access_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
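+
+// Usage sketch: create (or truncate) a file with default creation and access
+// property lists; the file name is illustrative.
+//
+//     int file_id = H5.H5Fcreate("new.h5", HDF5Constants.H5F_ACC_TRUNC,
+//             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+//     H5.H5Fclose(file_id);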
+
+private synchronized static native int _H5Fcreate(String name, int flags,
+        int create_id, int access_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Fflush causes all buffers associated with a file or object to be
+ * immediately flushed (written) to disk without removing the data from the
+ * (memory) cache.
+ * <P>
+ * After this call completes, the file (or object) is in a consistent state
+ * and all data written to date is assured to be permanent.
+ * 
+ * @param object_id
+ *            Identifier of object used to identify the file.
+ *            <b>object_id</b> can be any object associated with the file,
+ *            including the file itself, a dataset, a group, an attribute,
+ *            or a named data type.
+ * @param scope
+ *            specifies the scope of the flushing action, in the case that
+ *            the HDF-5 file is not a single physical file.
+ *            <P>
+ *            Valid values are:
+ *            <UL>
+ *            <LI>
+ *            H5F_SCOPE_GLOBAL Flushes the entire virtual file.</LI>
+ *            <LI>
+ *            H5F_SCOPE_LOCAL Flushes only the specified file.</LI>
+ *            </UL>
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Fflush(int object_id, int scope)
+        throws HDF5LibraryException;
+
+/**
+ * H5Fget_access_plist returns the file access property list identifier of
+ * the specified file.
+ * 
+ * @param file_id
+ *            Identifier of file to get access property list of
+ * 
+ * @return a file access property list identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Fget_access_plist(int file_id)
+        throws HDF5LibraryException
+{
+    int id = _H5Fget_access_plist(file_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Fget_access_plist(int file_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Fget_create_plist returns a file creation property list identifier
+ * identifying the creation properties used to create this file.
+ * 
+ * @param file_id
+ *            Identifier of the file to get creation property list
+ * 
+ * @return a file creation property list identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Fget_create_plist(int file_id)
+        throws HDF5LibraryException
+{
+    int id = _H5Fget_create_plist(file_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Fget_create_plist(int file_id)
+        throws HDF5LibraryException;
+
+public synchronized static native long H5Fget_filesize(int file_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Fget_freespace returns the amount of space that is unused by any
+ * objects in the file.
+ * 
+ * @param file_id
+ *            IN: File identifier for a currently-open HDF5 file
+ * 
+ * @return the amount of free space in the file
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Fget_freespace(int file_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Fget_intent retrieves the intended access mode flag passed with H5Fopen
+ * when the file was opened.
+ * 
+ * @param file_id
+ *            IN: File identifier for a currently-open HDF5 file
+ * 
+ * @return the intended access mode flag, as originally passed with H5Fopen.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Fget_intent(int file_id)
+        throws HDF5LibraryException;
+// int H5Fget_intent(int file_id, IntByReference intent);
+
+/**
+* H5Fget_mdc_hit_rate queries the metadata cache of the target file to
+* obtain its hit rate (cache hits / (cache hits + cache misses)) since the
+* last time hit rate statistics were reset.
+* 
+* @param file_id
+*            IN: Identifier of the target file.
+* 
+* @return the metadata cache hit rate.
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+**/
+public synchronized static native double H5Fget_mdc_hit_rate(int file_id)
+     throws HDF5LibraryException;
+
+/**
+ * H5Fget_mdc_size queries the metadata cache of the target file for the
+ * desired size information.
+ * 
+ * @param file_id
+ *            IN: Identifier of the target file.
+ * @param metadata_cache
+ *            OUT: Current metadata cache information
+ *            <ul>
+ *            <li>metadata_cache[0] = max_size_ptr // current cache maximum
+ *            size</li>
+ *            <li>metadata_cache[1] = min_clean_size_ptr // current cache
+ *            minimum clean size</li>
+ *            <li>metadata_cache[2] = cur_size_ptr // current cache size</li>
+ *            </ul>
+ * 
+ * @return current number of entries in the cache
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - metadata_cache is null.
+ **/
+public synchronized static native int H5Fget_mdc_size(int file_id,
+        long[] metadata_cache)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
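+
+/*
+ * Usage sketch, assuming file_id is an open HDF5 file identifier: the three
+ * size values are written into the supplied long[3] array and the number of
+ * cache entries is the return value.
+ *
+ * <pre>
+ * long[] mdc = new long[3];
+ * int entries = H5.H5Fget_mdc_size(file_id, mdc);
+ * long maxSize = mdc[0], minCleanSize = mdc[1], curSize = mdc[2];
+ * </pre>
+ */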
+
+/**
+ * H5Fget_name retrieves the name of the file to which the object obj_id
+ * belongs.
+ * 
+ * @param obj_id
+ *            IN: Identifier of the object for which the associated filename
+ *            is sought.
+ * 
+ * @return the filename.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native String H5Fget_name(int obj_id)
+        throws HDF5LibraryException;
+// long H5Fget_name(int obj_id, Buffer name/*out*/, long size);
+
+public synchronized static native String H5Fget_name(int obj_id, int size)
+        throws HDF5LibraryException;
+
+/**
+ * H5Fget_obj_count returns the number of open object identifiers for the
+ * file.
+ * 
+ * @param file_id
+ *            IN: File identifier for a currently-open HDF5 file
+ * @param types
+ *            IN: Type of object for which identifiers are to be returned.
+ *            <ul>
+ *            <li>H5F_OBJ_FILE Files only</li>
+ *            <li>H5F_OBJ_DATASET Datasets only</li>
+ *            <li>H5F_OBJ_GROUP Groups only</li>
+ *            <li>H5F_OBJ_DATATYPE Named datatypes only</li>
+ *            <li>H5F_OBJ_ATTR Attributes only</li>
+ *            <li>H5F_OBJ_ALL All of the above</li>
+ *            <li>H5F_OBJ_LOCAL Restrict search to objects opened through
+ *            current file identifier.</li>
+ *            </ul>
+ * 
+ * @return the number of open objects.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Fget_obj_count(int file_id,
+        int types) throws HDF5LibraryException;
+
+/**
+ * H5Fget_obj_count returns the number of open object identifiers for the
+ * file.
+ * 
+ * @param file_id
+ *            IN: File identifier for a currently-open HDF5 file
+ * @param types
+ *            IN: Type of object for which identifiers are to be returned.
+ *            <ul>
+ *            <li>H5F_OBJ_FILE Files only</li>
+ *            <li>H5F_OBJ_DATASET Datasets only</li>
+ *            <li>H5F_OBJ_GROUP Groups only</li>
+ *            <li>H5F_OBJ_DATATYPE Named datatypes only</li>
+ *            <li>H5F_OBJ_ATTR Attributes only</li>
+ *            <li>H5F_OBJ_ALL All of the above</li>
+ *            <li>H5F_OBJ_LOCAL Restrict search to objects opened through
+ *            current file identifier.</li>
+ *            </ul>
+ * 
+ * @return the number of open objects.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Fget_obj_count_long(int file_id,
+        int types) throws HDF5LibraryException;
+
+/**
+ * H5Fget_obj_ids returns the list of identifiers for all open HDF5 objects
+ * fitting the specified criteria.
+ * 
+ * @param file_id
+ *            IN: File identifier for a currently-open HDF5 file
+ * @param types
+ *            IN: Type of object for which identifiers are to be returned.
+ * @param max_objs
+ *            IN: Maximum number of object identifiers to place into
+ *            obj_id_list.
+ * @param obj_id_list
+ *            OUT: Pointer to the returned list of open object identifiers.
+ * 
+ * @return the number of objects placed into obj_id_list.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - obj_id_list is null.
+ **/
+public synchronized static native int H5Fget_obj_ids(int file_id,
+        int types, int max_objs, int[] obj_id_list)
+        throws HDF5LibraryException, NullPointerException;
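+
+/*
+ * Usage sketch combining H5Fget_obj_count and H5Fget_obj_ids, assuming
+ * file_id is an open HDF5 file identifier: size the array first, then
+ * fetch the identifiers of all datasets opened through this file.
+ *
+ * <pre>
+ * int types = HDF5Constants.H5F_OBJ_DATASET | HDF5Constants.H5F_OBJ_LOCAL;
+ * int count = H5.H5Fget_obj_count(file_id, types);
+ * int[] ids = new int[count];
+ * H5.H5Fget_obj_ids(file_id, types, count, ids);
+ * </pre>
+ */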
+
+/**
+ * H5Fget_obj_ids returns the list of identifiers for all open HDF5 objects
+ * fitting the specified criteria.
+ * 
+ * @param file_id
+ *            IN: File identifier for a currently-open HDF5 file
+ * @param types
+ *            IN: Type of object for which identifiers are to be returned.
+ * @param max_objs
+ *            IN: Maximum number of object identifiers to place into
+ *            obj_id_list.
+ * @param obj_id_list
+ *            OUT: Pointer to the returned list of open object identifiers.
+ * 
+ * @return the number of objects placed into obj_id_list.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - obj_id_list is null.
+ **/
+public synchronized static native long H5Fget_obj_ids_long(int file_id,
+        int types, long max_objs, int[] obj_id_list)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Fis_hdf5 determines whether a file is in the HDF5 format.
+ * 
+ * @param name
+ *            File name to check format.
+ * 
+ * @return true if the file is in the HDF5 format, false if not.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native boolean H5Fis_hdf5(String name)
+        throws HDF5LibraryException, NullPointerException;
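+
+/*
+ * Usage sketch: guard a call to H5Fopen with a format check. The file name
+ * "sample.h5" is only an illustration.
+ *
+ * <pre>
+ * if (H5.H5Fis_hdf5("sample.h5")) {
+ *     int fid = H5.H5Fopen("sample.h5", HDF5Constants.H5F_ACC_RDONLY,
+ *             HDF5Constants.H5P_DEFAULT);
+ *     // ... read from the file ...
+ *     H5.H5Fclose(fid);
+ * }
+ * </pre>
+ */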
+
+/**
+ * H5Fmount mounts the file specified by child_id onto the group specified
+ * by loc_id and name using the mount properties plist_id.
+ * 
+ * @param loc_id
+ *            The identifier for the group onto which the file specified by
+ *            child_id is to be mounted.
+ * @param name
+ *            The name of the group onto which the file specified by
+ *            child_id is to be mounted.
+ * @param child_id
+ *            The identifier of the file to be mounted.
+ * @param plist_id
+ *            The identifier of the property list to be used.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native int H5Fmount(int loc_id, String name,
+        int child_id, int plist_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * Given a mount point, H5Funmount dissociates the mount point's file
+ * from the file mounted there.
+ * 
+ * @param loc_id
+ *            The identifier for the location at which the specified file is
+ *            to be unmounted.
+ * @param name
+ *            The name of the file to be unmounted.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native int H5Funmount(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
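+
+/*
+ * Usage sketch pairing H5Fmount and H5Funmount, assuming parent_id is an
+ * open file that contains a group "/mnt" and child_id is another open file:
+ *
+ * <pre>
+ * H5.H5Fmount(parent_id, "/mnt", child_id, HDF5Constants.H5P_DEFAULT);
+ * // ... the child file's objects are now reachable under /mnt ...
+ * H5.H5Funmount(parent_id, "/mnt");
+ * </pre>
+ */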
+
+/**
+ * H5Freset_mdc_hit_rate_stats resets the hit rate statistics counters in
+ * the metadata cache associated with the specified file.
+ * 
+ * @param file_id
+ *            IN: Identifier of the target file.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Freset_mdc_hit_rate_stats(
+        int file_id) throws HDF5LibraryException;
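+
+/*
+ * Usage sketch, assuming file_id is an open HDF5 file identifier: sample
+ * the metadata cache hit rate for one phase of work, then reset the
+ * counters so the next sample starts fresh.
+ *
+ * <pre>
+ * double hitRate = H5.H5Fget_mdc_hit_rate(file_id);
+ * H5.H5Freset_mdc_hit_rate_stats(file_id);
+ * </pre>
+ */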
+
+///////// unimplemented ////////
+//herr_t H5Fclear_elink_file_cache(hid_t file_id);
+
+//ssize_t H5Fget_file_image(hid_t file_id, void * buf_ptr, size_t buf_len);
+
+///**
+//* H5Fget_info returns global information for the file associated with the
+//* object identifier obj_id.
+//*
+//* @param obj_id IN: Object identifier for any object in the file.
+//*
+//* @return the structure containing global file information.
+//*
+//* @exception HDF5LibraryException - Error from the HDF-5 Library.
+//**/
+//public synchronized static native H5F_info_t H5Fget_info(int obj_id)
+//throws HDF5LibraryException, NullPointerException;
+//int H5Fget_info(int obj_id, H5F_info_t file_info);
+
+///**
+//* H5Fget_mdc_config loads the current metadata cache configuration into
+//* the instance of H5AC_cache_config_t pointed to by the config_ptr
+//parameter.
+//*
+//* @param file_id IN: Identifier of the target file
+//* @param config_ptr IN/OUT: Pointer to the instance of
+//H5AC_cache_config_t in which the current metadata cache configuration is to be reported.
+//*
+//* @return none
+//*
+//* @exception HDF5LibraryException - Error from the HDF-5 Library.
+//* @exception NullPointerException - config_ptr is null.
+//**/
+//public synchronized static native void H5Fget_mdc_config(int file_id, H5AC_cache_config_t config_ptr)
+//throws HDF5LibraryException, NullPointerException;
+
+///**
+//* H5Fget_vfd_handle returns a pointer to the file handle from the
+//low-level file driver
+//* currently being used by the HDF5 library for file I/O.
+//*
+//* @param file_id IN: Identifier of the file to be queried.
+//* @param fapl IN: File access property list identifier.
+//*
+//* @return a pointer to the file handle being used by the low-level
+//virtual file driver.
+//*
+//* @exception HDF5LibraryException - Error from the HDF-5 Library.
+//**/
+//public synchronized static native Pointer file_handle
+//H5Fget_vfd_handle(int file_id, int fapl)
+//throws HDF5LibraryException;
+
+///**
+//* H5Fset_mdc_config attempts to configure the file's metadata cache
+//according to the configuration supplied.
+//*
+//* @param file_id IN: Identifier of the target file
+//* @param config_ptr IN: Pointer to the instance of H5AC_cache_config_t
+//containing the desired configuration.
+//*
+//* @return none
+//*
+//* @exception HDF5LibraryException - Error from the HDF-5 Library.
+//* @exception NullPointerException - config_ptr is null.
+//**/
+//public synchronized static native int H5Fset_mdc_config(int file_id,
+//H5AC_cache_config_t config_ptr)
+//throws HDF5LibraryException, NullPointerException;
+
+// ////////////////////////////////////////////////////////////
+// //
+// H5G: Group Interface Functions //
+// //
+// ////////////////////////////////////////////////////////////
+
+/**
+ * H5Gclose releases resources used by a group which was opened by a call to
+ * H5Gcreate() or H5Gopen().
+ * 
+ * @param group_id
+ *            Group identifier to release.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Gclose(int group_id) throws HDF5LibraryException
+{
+    if (group_id < 0)
+        return 0; // throw new HDF5LibraryException("Negative ID");
+    
+    OPEN_IDS.removeElement(group_id);
+    return _H5Gclose(group_id);
+}
+
+private synchronized static native int _H5Gclose(int group_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Gcreate creates a new group with the specified name at the specified
+ * location, loc_id.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Gcreate(int, String, int, int, int) }
+ * 
+ * @param loc_id
+ *            The file or group identifier.
+ * @param name
+ *            The absolute or relative name of the new group.
+ * @param size_hint
+ *            An optional parameter indicating the number of bytes to
+ *            reserve for the names that will appear in the group.
+ * 
+ * @return a valid group identifier for the open group if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+@Deprecated
+public static int H5Gcreate(int loc_id, String name, long size_hint)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Gcreate(loc_id, name, size_hint);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Gcreate(int loc_id, String name,
+        long size_hint) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Gcreate creates a new group with the specified name at the specified
+ * location, loc_id.
+ * 
+ * @param loc_id
+ *            IN: The file or group identifier.
+ * @param name
+ *            IN: The absolute or relative name of the new group.
+ * @param lcpl_id
+ *            IN: Identifier of link creation property list.
+ * @param gcpl_id
+ *            IN: Identifier of group creation property list.
+ * @param gapl_id
+ *            IN: Identifier of group access property list. (No group access
+ *            properties have been implemented at this time; use
+ *            H5P_DEFAULT.)
+ * 
+ * @return a valid group identifier
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public static int H5Gcreate(int loc_id, String name,
+        int lcpl_id, int gcpl_id, int gapl_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Gcreate2(loc_id, name, lcpl_id, gcpl_id, gapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Gcreate2(int loc_id, String name,
+        int lcpl_id, int gcpl_id, int gapl_id)
+        throws HDF5LibraryException, NullPointerException;
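+
+/*
+ * Usage sketch, assuming file_id is an open HDF5 file identifier: create a
+ * group "/results" with default property lists and close it again.
+ *
+ * <pre>
+ * int gid = H5.H5Gcreate(file_id, "/results", HDF5Constants.H5P_DEFAULT,
+ *         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ * try {
+ *     // ... create datasets or attributes inside the group ...
+ * } finally {
+ *     H5.H5Gclose(gid);
+ * }
+ * </pre>
+ */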
+
+/**
+ * H5Gcreate_anon creates a new empty group in the file specified by loc_id.
+ * 
+ * @param loc_id
+ *            IN: File or group identifier specifying the file in which the
+ *            new group is to be created.
+ * @param gcpl_id
+ *            IN: Identifier of group creation property list.
+ * @param gapl_id
+ *            IN: Identifier of group access property list. (No group access
+ *            properties have been implemented at this time; use
+ *            H5P_DEFAULT.)
+ * 
+ * @return a valid group identifier
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Gcreate_anon(int loc_id,
+        int gcpl_id, int gapl_id) throws HDF5LibraryException
+{
+    int id = _H5Gcreate_anon(loc_id, gcpl_id, gapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Gcreate_anon(int loc_id,
+        int gcpl_id, int gapl_id) throws HDF5LibraryException;
+
+/**
+ * H5Gget_comment retrieves the comment for the object name. The comment
+ * is returned in the buffer comment.
+ * 
+ * @param loc_id
+ *            IN: Identifier of the file, group, dataset, or datatype.
+ * @param name
+ *            IN: Name of the object whose comment is to be set or reset.
+ * @param bufsize
+ *            IN: Anticipated size of the buffer required to hold comment.
+ * @param comment
+ *            OUT: The comment.
+ * @return the number of characters in the comment, counting the null
+ *         terminator, if successful
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                - JNI error writing back data
+ * @exception ArrayStoreException
+ *                - JNI error writing back data
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ * @exception IllegalArgumentException
+ *                - size < 1, comment is invalid.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Oget_comment(int)}
+ **/
+@Deprecated
+public synchronized static native int H5Gget_comment(int loc_id,
+        String name, int bufsize, String[] comment)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+        HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Gset_comment sets the comment for the object name to comment. Any
+ * previously existing comment is overwritten.
+ * 
+ * @param loc_id
+ *            IN: Identifier of the file, group, dataset, or datatype.
+ * @param name
+ *            IN: Name of the object whose comment is to be set or reset.
+ * @param comment
+ *            IN: The new comment.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name or comment is null.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Oset_comment(int, String)}
+ **/
+@Deprecated
+public synchronized static native int H5Gset_comment(int loc_id,
+        String name, String comment)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Gget_create_plist returns an identifier for the group creation property
+ * list associated with the group specified by group_id.
+ * 
+ * @param group_id
+ *            IN: Identifier of the group.
+ * 
+ * @return an identifier for the group's creation property list
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Gget_create_plist(int group_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Gget_info retrieves information about the group specified by group_id.
+ * The information is returned in the group_info struct.
+ * 
+ * @param group_id
+ *            IN: Identifier of the group.
+ * 
+ * @return a structure in which group information is returned
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native H5G_info_t H5Gget_info(int group_id)
+        throws HDF5LibraryException;
+// int H5Gget_info(int loc_id, H5G_info_t ginfo);
+
+/**
+ * H5Gget_info_by_idx retrieves information about a group, according to the
+ * group's position within an index.
+ * 
+ * @param group_id
+ *            IN: File or group identifier.
+ * @param group_name
+ *            IN: Name of group for which information is to be retrieved.
+ * @param idx_type
+ *            IN: Type of index by which objects are ordered
+ * @param order
+ *            IN: Order of iteration within index
+ * @param n
+ *            IN: Attribute's position in index
+ * @param lapl_id
+ *            IN: Link access property list.
+ * 
+ * @return a structure in which group information is returned
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native H5G_info_t H5Gget_info_by_idx(
+        int group_id, String group_name, int idx_type, int order, long n,
+        int lapl_id) throws HDF5LibraryException, NullPointerException;
+// int H5Gget_info_by_idx(int group_id, String group_name,
+// H5_index_t idx_type, H5_iter_order_t order, long n, H5G_info_t ginfo, int
+// lapl_id);
+
+/**
+ * H5Gget_info_by_name retrieves information about the group group_name
+ * located in the file or group specified by loc_id.
+ * 
+ * @param group_id
+ *            IN: File or group identifier.
+ * @param name
+ *            IN: Name of group for which information is to be retrieved.
+ * @param lapl_id
+ *            IN: Link access property list.
+ * 
+ * @return a structure in which group information is returned
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native H5G_info_t H5Gget_info_by_name(
+        int group_id, String name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+// int H5Gget_info_by_name(int group_id, String name, H5G_info_t ginfo, int
+// lapl_id);
+
+/**
+ * H5Gget_linkval returns size characters of the link value through the
+ * value argument if loc_id (a file or group identifier) and name specify a
+ * symbolic link.
+ * 
+ * @param loc_id
+ *            IN: Identifier of the file, group, dataset, or datatype.
+ * @param name
+ *            IN: Name of the object whose link value is to be checked.
+ * @param size
+ *            IN: Maximum number of characters of value to be returned.
+ * @param value
+ *            OUT: Link value.
+ * 
+ * @return a non-negative value, with the link value in value, if
+ *         successful.
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                Copy back failed
+ * @exception ArrayStoreException
+ *                Copy back failed
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ * @exception IllegalArgumentException
+ *                - size is invalid
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Lget_val(int, String, String[], int)}
+ **/
+@Deprecated
+public synchronized static native int H5Gget_linkval(int loc_id,
+        String name, int size, String[] value)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+        HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * Returns the number of objects in the group specified by its identifier.
+ * 
+ * @param loc_id
+ *            Identifier of the group or the file
+ * @param num_obj
+ *            Number of objects in the group
+ * @return positive value if successful; otherwise returns a negative value.
+ * @throws HDF5LibraryException
+ * @throws NullPointerException
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Gget_info(int)}
+ */
+@Deprecated
+public synchronized static native int H5Gget_num_objs(int loc_id,
+        long[] num_obj) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * retrieves information of all objects under the group (name) located in
+ * the file or group specified by loc_id.
+ * 
+ * @param loc_id
+ *            IN: File or group identifier
+ * @param name
+ *            IN: Name of group for which information is to be retrieved
+ * @param objNames
+ *            OUT: Names of all objects under the group, name.
+ * @param objTypes
+ *            OUT: Types of all objects under the group, name.
+ * @param objRef
+ *            OUT: Reference number of all objects under the group, name.
+ * 
+ * @return the number of items found
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ */
+public synchronized static int H5Gget_obj_info_all(int loc_id, String name,
+        String[] objNames, int[] objTypes, long[] objRef)
+        throws HDF5LibraryException, NullPointerException
+{
+    if (objNames == null) {
+        throw new NullPointerException(
+                "H5Gget_obj_info_all(): name array is null");
+    }
+    
+   return H5Gget_obj_info_all(loc_id, name, objNames, objTypes, null, null, objRef, HDF5Constants.H5_INDEX_NAME);
+}
+
+public synchronized static int H5Gget_obj_info_all(int loc_id, String name,
+        String[] oname, int[] otype, int[] ltype, long[] ref, int indx_type)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Gget_obj_info_full(loc_id, name, oname, otype, ltype, null, ref, indx_type, -1);
+}
+
+public synchronized static int H5Gget_obj_info_all(int loc_id, String name,
+        String[] oname, int[] otype, int[] ltype, long[] fno, long[] ref, int indx_type)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Gget_obj_info_full(loc_id, name, oname, otype, ltype, fno, ref, oname.length, indx_type, -1);
+}
+
+public synchronized static int H5Gget_obj_info_full(int loc_id, String name,
+        String[] oname, int[] otype, int[] ltype, long[] fno, long[] ref, int indx_type, int indx_order)
+        throws HDF5LibraryException, NullPointerException
+{
+    if (oname == null) {
+        throw new NullPointerException(
+                "H5Gget_obj_info_full(): name array is null");
+    }
+
+    if (otype == null) {
+        throw new NullPointerException(
+                "H5Gget_obj_info_full(): object type array is null");
+    }
+
+    if (oname.length == 0) {
+        throw new HDF5LibraryException(
+                "H5Gget_obj_info_full(): array size is zero");
+    }
+
+    if (oname.length != otype.length) {
+        throw new HDF5LibraryException(
+                "H5Gget_obj_info_full(): name and type array sizes are different");
+    }
+    
+    if (ltype == null)
+        ltype = new int[otype.length];
+
+    if (fno == null)
+        fno = new long[ref.length];
+    
+    if (indx_type < 0)
+        indx_type = HDF5Constants.H5_INDEX_NAME;
+    
+    if (indx_order < 0)
+        indx_order = HDF5Constants.H5_ITER_INC;
+    
+    return H5Gget_obj_info_full(loc_id, name, oname, otype, ltype, fno, ref, oname.length, indx_type, indx_order);
+}
+
+private synchronized static native int H5Gget_obj_info_full(int loc_id,
+        String name, String[] oname, int[] otype, int[] ltype, long[] fno, long[] ref, int n, int indx_type, int indx_order)
+        throws HDF5LibraryException, NullPointerException;
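+
+/*
+ * Usage sketch for H5Gget_obj_info_all, assuming file_id is an open HDF5
+ * file identifier containing a group "/data": size the output arrays from
+ * H5Gget_info, then fetch names, types and reference numbers in one call.
+ *
+ * <pre>
+ * int gid = H5.H5Gopen(file_id, "/data", HDF5Constants.H5P_DEFAULT);
+ * int n = (int) H5.H5Gget_info(gid).nlinks;
+ * H5.H5Gclose(gid);
+ * String[] names = new String[n];
+ * int[] types = new int[n];
+ * long[] refs = new long[n];
+ * H5.H5Gget_obj_info_all(file_id, "/data", names, types, refs);
+ * </pre>
+ */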
+
+/**
+ * H5Gget_obj_info_idx reports the name and type of the object with index 'idx'
+ * in a Group. The 'idx' corresponds to the index maintained by H5Giterate.
+ * Each link is returned, so objects with multiple links will be counted
+ * once for each link.
+ * 
+ * @param loc_id
+ *            IN: file or group ID.
+ * @param name
+ *            IN: name of the group to iterate, relative to the loc_id
+ * @param idx
+ *            IN: the index of the object to iterate.
+ * @param oname
+ *            the name of the object [OUT]
+ * @param type
+ *            the type of the object [OUT]
+ * 
+ * @return non-negative if successful, -1 if not.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ */
+public synchronized static int H5Gget_obj_info_idx(int loc_id, String name,
+        int idx, String[] oname, int[] type)
+        throws HDF5LibraryException, NullPointerException
+{
+    long default_buf_size = 4096;
+    String n[] = new String[1];
+    n[0] = "";
+    int grp_id = H5Gopen(loc_id, name);
+    try {
+        long val = H5Gget_objname_by_idx(grp_id, idx, n, default_buf_size);
+        int type_code = H5Gget_objtype_by_idx(grp_id, idx);
+        oname[0] = n[0];
+        type[0] = type_code;
+        return (int) val;
+    } finally {
+        // close the group again so that the group identifier is not leaked
+        H5Gclose(grp_id);
+    }
+}
+
+/*
+ * ////////////////////////////////////////////////////////////////////////
+ * // Add these methods so that we don't need to call a native method in a
+ * // loop to get information for all the objects in a group, which takes
+ * // a lot of time to finish if the number of objects is more than 10,000.
+ * ////////////////////////////////////////////////////////////////////////
+ */
+/**
+ * retrieves information about all objects (recursively) under the group
+ * (name) located in the file or group specified by loc_id, up to the
+ * maximum specified by objMax.
+ * 
+ * @param loc_id
+ *            IN: File or group identifier
+ * @param objNames
+ *            OUT: Names of all objects under the group, name.
+ * @param objTypes
+ *            OUT: Types of all objects under the group, name.
+ * @param lnkTypes
+ *            OUT: Types of all links under the group, name.
+ * @param objRef
+ *            OUT: Reference number of all objects under the group, name.
+ * @param objMax
+ *            IN: Maximum number of all objects under the group, name.
+ * 
+ * @return the number of items found
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ */
+public synchronized static int H5Gget_obj_info_max(int loc_id,
+        String[] objNames, int[] objTypes, int[] lnkTypes, long[] objRef, int objMax)
+        throws HDF5LibraryException, NullPointerException
+{
+    if (objNames == null) {
+        throw new NullPointerException(
+                "H5Gget_obj_info_max(): name array is null");
+    }
+
+    if (objTypes == null) {
+        throw new NullPointerException(
+                "H5Gget_obj_info_max(): object type array is null");
+    }
+
+    if (lnkTypes == null) {
+        throw new NullPointerException(
+                "H5Gget_obj_info_max(): link type array is null");
+    }
+
+    if (objNames.length <= 0) {
+        throw new HDF5LibraryException(
+                "H5Gget_obj_info_max(): array size is zero");
+    }
+
+    if (objMax <= 0) {
+        throw new HDF5LibraryException(
+                "H5Gget_obj_info_max(): maximum array size is zero");
+    }
+
+    if (objNames.length != objTypes.length) {
+        throw new HDF5LibraryException(
+                "H5Gget_obj_info_max(): name and type array sizes are different");
+    }
+
+    return H5Gget_obj_info_max(loc_id, objNames, objTypes, lnkTypes, objRef, objMax,
+            objNames.length);
+}
+
+private synchronized static native int H5Gget_obj_info_max(int loc_id,
+        String[] oname, int[] otype, int[] ltype, long[] ref, int amax, int n)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Gget_objinfo returns information about the specified object.
+ * 
+ * @param loc_id
+ *            IN: File, group, dataset, or datatype identifier.
+ * @param name
+ *            IN: Name of the object for which status is being sought.
+ * @param follow_link
+ *            IN: Link flag.
+ * @param fileno
+ *            OUT: file id numbers.
+ * @param objno
+ *            OUT: object id numbers.
+ * @param link_info
+ *            OUT: link information.
+ * 
+ *            <pre>
+ *          link_info[0] = nlink
+ *          link_info[1] = type
+ *          link_info[2] = linklen
+ * </pre>
+ * @param mtime
+ *            OUT: modification time
+ * 
+ * @return a non-negative value if successful, with the fields of link_info
+ *         and mtime (if non-null) initialized.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name or array is null.
+ * @exception IllegalArgumentException
+ *                - bad argument.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Lget_info(int, String, int)} and {@link #H5Oget_info(int)}
+ **/
+@Deprecated
+public synchronized static native int H5Gget_objinfo(int loc_id,
+        String name, boolean follow_link, long[] fileno, long[] objno,
+        int[] link_info, long[] mtime)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Gget_objinfo returns information about the specified object in an
+ * HDF5GroupInfo object.
+ * 
+ * @param loc_id
+ *            IN: File, group, dataset, or datatype identifier.
+ * @param name
+ *            IN: Name of the object for which status is being sought.
+ * @param follow_link
+ *            IN: Link flag.
+ * @param info
+ *            OUT: the HDF5GroupInfo object to store the object information
+ * 
+ * @return a non-negative value if successful, with the fields of
+ *         HDF5GroupInfo object (if non-null) initialized.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ * 
+ * @see ncsa.hdf.hdf5lib.HDF5GroupInfo
+ * 
+ * @deprecated As of HDF5 1.8
+ **/
+@Deprecated
+public synchronized static int H5Gget_objinfo(int loc_id, String name,
+        boolean follow_link, HDF5GroupInfo info)
+        throws HDF5LibraryException, NullPointerException
+{
+    int status = -1;
+    long[] fileno = new long[2];
+    long[] objno = new long[2];
+    int[] link_info = new int[3];
+    long[] mtime = new long[1];
+
+    status = H5Gget_objinfo(loc_id, name, follow_link, fileno, objno,
+            link_info, mtime);
+
+    if (status >= 0) {
+        info.setGroupInfo(fileno, objno, link_info[0], link_info[1],
+                mtime[0], link_info[2]);
+    }
+    return status;
+}
+
+/**
+ * Returns a name of an object specified by an index.
+ * 
+ * @param group_id
+ *            Group or file identifier
+ * @param idx
+ *            Transient index identifying object
+ * @param name
+ *            the object name
+ * @param size
+ *            Name length
+ * @return the size of the object name if successful, or 0 if no name is
+ *         associated with the group identifier. Otherwise returns a
+ *         negative value
+ * @throws HDF5LibraryException
+ * @throws NullPointerException
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Lget_name_by_idx(int, String, int, int, long, int)}
+ */
+@Deprecated
+public synchronized static native long H5Gget_objname_by_idx(int group_id,
+        long idx, String[] name, long size)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * Returns the type of an object specified by an index.
+ * 
+ * @param group_id
+ *            Group or file identifier.
+ * @param idx
+ *            Transient index identifying object.
+ * @return Returns the type of the object if successful. Otherwise returns a
+ *         negative value
+ * @throws HDF5LibraryException
+ * @throws NullPointerException
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Oget_info(int)}
+ */
+@Deprecated
+public synchronized static native int H5Gget_objtype_by_idx(int group_id,
+        long idx) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Glink creates a new name for an already existing object.
+ * 
+ * @param loc_id
+ *            File, group, dataset, or datatype identifier.
+ * @param link_type
+ *            Link type. Possible values are:
+ *            <UL>
+ *            <LI>
+ *            H5G_LINK_HARD</LI>
+ *            <LI>
+ *            H5G_LINK_SOFT.</LI>
+ *            </UL>
+ * @param current_name
+ *            A name of the existing object if link is a hard link. Can be
+ *            anything for the soft link.
+ * @param new_name
+ *            New name for the object.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - current_name or new_name is null.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by
+ *             {@link #H5Lcreate_hard(int, String, int, String, int, int)}
+ *             and {@link #H5Lcreate_soft(String, int, String, int, int)}
+ **/
+@Deprecated
+public synchronized static native int H5Glink(int loc_id, int link_type,
+        String current_name, String new_name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Glink creates a new name for an already existing object.
+ * 
+ * @deprecated As of HDF5 1.8
+ **/
+@Deprecated
+public synchronized static native int H5Glink2(int curr_loc_id,
+        String current_name, int link_type, int new_loc_id, String new_name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Gunlink removes an association between a name and an object.
+ * 
+ * @param loc_id
+ *            Identifier of the file containing the object.
+ * @param name
+ *            Name of the object to unlink.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Ldelete(int, String, int)}
+ **/
+@Deprecated
+public synchronized static native int H5Gunlink(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Gmove renames an object within an HDF5 file. The original name, src, is
+ * unlinked from the group graph and the new name, dst, is inserted as an
+ * atomic operation. Both names are interpreted relative to loc_id, which is
+ * either a file or a group identifier.
+ * 
+ * @param loc_id
+ *            File or group identifier.
+ * @param src
+ *            Object's original name.
+ * @param dst
+ *            Object's new name.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - src or dst is null.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Lmove(int, String, int,String, int, int)}
+ **/
+@Deprecated
+public synchronized static native int H5Gmove(int loc_id, String src,
+        String dst) throws HDF5LibraryException, NullPointerException;
+
+// Backward compatibility:
+// These functions have been replaced by new HDF5 library calls.
+// The interface is preserved as a convenience to existing code.
+/**
+ * H5Gn_members_long reports the number of objects in a Group. The 'objects'
+ * include everything that will be visited by H5Giterate. Each link is
+ * returned, so objects with multiple links will be counted once for each
+ * link.
+ * 
+ * @param loc_id
+ *            file or group ID.
+ * @param name
+ *            name of the group to iterate, relative to the loc_id
+ * 
+ * @return the number of members in the group or -1 if error.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ */
+public synchronized static long H5Gn_members_long(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException 
+{
+    int grp_id = H5Gopen(loc_id, name);
+    long n = -1;
+
+    try { 
+        H5G_info_t info = H5.H5Gget_info(grp_id);
+        n =  info.nlinks;
+    } finally {
+        H5Gclose(grp_id); 
+    } 
+    
+    return n;
+}
+
+/**
+ * H5Gn_members reports the number of objects in a Group. The 'objects'
+ * include everything that will be visited by H5Giterate. Each link is
+ * returned, so objects with multiple links will be counted once for each
+ * link.
+ * 
+ * @param loc_id
+ *            file or group ID.
+ * @param name
+ *            name of the group to iterate, relative to the loc_id
+ * 
+ * @return the number of members in the group or -1 if error.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ */
+public synchronized static int H5Gn_members(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException 
+{
+    return (int) H5Gn_members_long(loc_id, name);
+}
+
+/**
+ * H5Gopen opens an existing group with the specified name at the specified
+ * location, loc_id.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Gopen(int, String, int) }
+ * 
+ * @param loc_id
+ *            File or group identifier within which group is to be open.
+ * @param name
+ *            Name of group to open.
+ * 
+ * @return a valid group identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+@Deprecated
+public static int H5Gopen(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Gopen(loc_id, name);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Gopen(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Gopen opens an existing group, name, at the location specified by
+ * loc_id.
+ * 
+ * @param loc_id
+ *            IN: File or group identifier specifying the location of the
+ *            group to be opened.
+ * @param name
+ *            IN: Name of group to open.
+ * @param gapl_id
+ *            IN: Identifier of group access property list. (No group access
+ *            properties have been implemented at this time; use
+ *            H5P_DEFAULT.)
+ * 
+ * @return a valid group identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public static int H5Gopen(int loc_id, String name,
+        int gapl_id) throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Gopen2(loc_id, name, gapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Gopen2(int loc_id, String name,
+        int gapl_id) throws HDF5LibraryException, NullPointerException;
+
+//////////////////////////////////////////////////////////////
+////
+//H5I: HDF5 1.8 Identifier Interface API Functions            //
+////
+//////////////////////////////////////////////////////////////
+
+public synchronized static native int H5Iget_file_id(int obj_id)
+        throws HDF5LibraryException;
+
+public synchronized static native long H5Iget_name(int obj_id,
+        String[] name, long size)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Iget_ref(int obj_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Idec_ref(int obj_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Iinc_ref(int obj_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Iget_type retrieves the type of the object identified by obj_id.
+ * 
+ * @param obj_id
+ *            IN: Object identifier whose type is to be determined.
+ * 
+ * @return the object type if successful; otherwise H5I_BADID.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Iget_type(int obj_id)
+        throws HDF5LibraryException;
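+
+/*
+ * Usage sketch, assuming obj_id is some open identifier: dispatch on the
+ * identifier type.
+ *
+ * <pre>
+ * int type = H5.H5Iget_type(obj_id);
+ * if (type == HDF5Constants.H5I_GROUP) {
+ *     // obj_id refers to a group
+ * } else if (type == HDF5Constants.H5I_DATASET) {
+ *     // obj_id refers to a dataset
+ * }
+ * </pre>
+ */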
+
+/**
+* H5Iget_type_ref retrieves the reference count on an ID type. The reference
+* count is used by the library to indicate when an ID type can be destroyed.
+* 
+* @param type          
+*           IN: The identifier of the type whose reference count is to be retrieved
+* 
+* @return The current reference count on success, negative on failure.
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+**/
+public synchronized static native int H5Iget_type_ref(int type)
+        throws HDF5LibraryException;
+
+/**
+ * H5Inmembers returns the number of identifiers of the identifier type specified in type. 
+ * 
+ * @param type          
+ *           IN: Identifier for the identifier type whose member count will be retrieved
+ * 
+ * @return  Number of identifiers of the specified identifier type
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Inmembers(int type)
+        throws HDF5LibraryException;
+
+///////// unimplemented ////////
+//herr_t H5Iclear_type(H5I_type_t type, hbool_t force);
+
+//int H5Idec_type_ref(H5I_type_t type);
+
+//herr_t H5Idestroy_type(H5I_type_t type);
+
+//int H5Iinc_type_ref(H5I_type_t type);
+
+//htri_t H5Iis_valid(hid_t id);
+
+//void *H5Iobject_verify(hid_t id, H5I_type_t id_type);
+
+//hid_t H5Iregister(H5I_type_t type, const void *object);
+
+//H5I_type_t H5Iregister_type(size_t hash_size, unsigned reserved, H5I_free_t free_func);
+
+//void *H5Iremove_verify(hid_t id, H5I_type_t id_type);
+
+//void *H5Isearch(H5I_type_t type, H5I_search_func_t func, void *key);
+
+//htri_t H5Itype_exists(H5I_type_t type);
+
+
+// //////////////////////////////////////////////////////////////////
+// //
+// New APIs for HDF5Index //
+// October 10, 2005 //
+// //////////////////////////////////////////////////////////////////
+
+public synchronized static native int H5INcreate(String grp_name,
+        int grp_loc_id, int property_list, int data_loc_id,
+        String data_loc_name, String field_name, long max_mem_size);
+
+public synchronized static native int H5INquery(int dset_id, String keys[],
+        Object ubounds, Object lbounds, int nkeys);
+
+// //////////////////////////////////////////////////////////////////
+// H5L: Link Interface Functions //
+// //////////////////////////////////////////////////////////////////
+
+/**
+ *  H5Lcopy copies a link from one location to another. 
+ *
+ *  @param src_loc   IN: Location identifier of the source link 
+ *  @param src_name  IN: Name of the link to be copied 
+ *  @param dst_loc   IN: Location identifier specifying the destination of the copy 
+ *  @param dst_name  IN: Name to be assigned to the new copy
+ *  @param lcpl_id   IN: Link creation property list identifier
+ *  @param lapl_id   IN: Link access property list identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native void H5Lcopy(int src_loc, String src_name, int dst_loc,
+        String dst_name, int lcpl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lcreate_external creates a new soft link to an external object, which is 
+ *  an object in a different HDF5 file from the location of the link. 
+ *
+ *  @param file_name   IN: Name of the target file containing the target object.
+ *  @param obj_name    IN: Path within the target file to the target object.
+ *  @param link_loc_id IN: The file or group identifier for the new link. 
+ *  @param link_name   IN: The name of the new link.
+ *  @param lcpl_id     IN: Link creation property list identifier
+ *  @param lapl_id     IN: Link access property list identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native void H5Lcreate_external(String file_name, String obj_name,
+        int link_loc_id, String link_name, int lcpl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
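+
+/*
+ * Usage sketch, assuming file_id is an open HDF5 file identifier: link the
+ * object "/data" of a second (hypothetical) file "other.h5" into this file
+ * under the name "/external_data".
+ *
+ * <pre>
+ * H5.H5Lcreate_external("other.h5", "/data", file_id, "/external_data",
+ *         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ * </pre>
+ */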
+
+/**
+ *  H5Lcreate_hard creates a new hard link to a pre-existing object in an HDF5 file.
+ *
+ *  @param cur_loc   IN: The file or group identifier for the target object.
+ *  @param cur_name  IN: Name of the target object, which must already exist.
+ *  @param dst_loc   IN: The file or group identifier for the new link.
+ *  @param dst_name  IN: The name of the new link.
+ *  @param lcpl_id   IN: Link creation property list identifier
+ *  @param lapl_id   IN: Link access property list identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - cur_name or dst_name is null.
+ **/
+public synchronized static native void H5Lcreate_hard(int cur_loc, String cur_name,
+        int dst_loc, String dst_name, int lcpl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lcreate_soft creates a new soft link to an object in an HDF5 file.
+ *
+ *  @param link_target IN: Path to the target object, which is not required to exist.
+ *  @param link_loc_id IN: The file or group identifier for the new link.
+ *  @param link_name   IN: The name of the new link.
+ *  @param lcpl_id     IN: Link creation property list identifier
+ *  @param lapl_id     IN: Link access property list identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - link_name is null.
+ **/
+public synchronized static native void H5Lcreate_soft(String link_target, int link_loc_id,
+        String link_name, int lcpl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
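+
+/*
+ * Usage sketch, assuming file_id is an open HDF5 file identifier that
+ * contains a dataset "/data/raw": create a hard link and a soft link to it.
+ *
+ * <pre>
+ * H5.H5Lcreate_hard(file_id, "/data/raw", file_id, "/data/raw_hard",
+ *         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ * H5.H5Lcreate_soft("/data/raw", file_id, "/data/raw_soft",
+ *         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ * </pre>
+ */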
+
+/**
+ *  H5Ldelete removes the specified link from a group.
+ *
+ *  @param loc_id  IN: Identifier of the file or group containing the object.
+ *  @param name    IN: Name of the link to delete.
+ *  @param lapl_id IN: Link access property list identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native void H5Ldelete(int loc_id, String name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Ldelete_by_idx removes the nth link in a group according to the specified order 
+ *  and in the specified index.
+ *
+ *  @param loc_id     IN: File or group identifier specifying location of subject group
+ *  @param group_name IN: Name of subject group
+ *  @param idx_type   IN: Index or field which determines the order 
+ *  @param order      IN: Order within field or index
+ *  @param n          IN: Link for which to retrieve information 
+ *  @param lapl_id    IN: Link access property list identifier 
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - group_name is null.
+ **/
+public synchronized static native void H5Ldelete_by_idx(int loc_id, String group_name,
+        int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lexists checks if a link with a particular name exists in a group. 
+ *
+ *  @param loc_id  IN: Identifier of the file or group to query. 
+ *  @param name    IN: The name of the link to check. 
+ *  @param lapl_id IN: Link access property list identifier
+ *
+ *  @return a boolean, true if the name exists, otherwise false.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native boolean H5Lexists(int loc_id, String name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
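+
+/*
+ * Usage sketch, assuming file_id is an open HDF5 file identifier: delete a
+ * link only if it exists.
+ *
+ * <pre>
+ * if (H5.H5Lexists(file_id, "/data/raw_soft", HDF5Constants.H5P_DEFAULT)) {
+ *     H5.H5Ldelete(file_id, "/data/raw_soft", HDF5Constants.H5P_DEFAULT);
+ * }
+ * </pre>
+ */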
+
+/**
+ *  H5Lget_info returns information about the specified link.
+ *
+ *  @param loc_id  IN: Identifier of the file or group. 
+ *  @param name    IN: Name of the link for which information is being sought.
+ *  @param lapl_id IN: Link access property list identifier
+ *
+ *  @return a buffer(H5L_info_t) for the link information.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native H5L_info_t H5Lget_info(int loc_id, String name,
+        int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lget_info_by_idx retrieves the metadata for a link in a group,
+ *  according to the order within a specified field or index.
+ *
+ *  @param loc_id     IN: File or group identifier specifying location of subject group
+ *  @param group_name IN: Name of subject group
+ *  @param idx_type   IN: Type of index
+ *  @param order      IN: Order within field or index
+ *  @param n          IN: Link for which to retrieve information 
+ *  @param lapl_id    IN: Link access property list identifier 
+ *
+ *  @return a buffer(H5L_info_t) for the link information.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - group_name is null.
+ **/
+public synchronized static native H5L_info_t H5Lget_info_by_idx(int loc_id, String group_name,
+        int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lget_name_by_idx retrieves the name of the nth link in a group, according to
+ *  the order within a specified field or index. 
+ *
+ *  @param loc_id     IN: File or group identifier specifying location of subject group
+ *  @param group_name IN: Name of subject group
+ *  @param idx_type   IN: Type of index
+ *  @param order      IN: Order within field or index
+ *  @param n          IN: Link for which to retrieve information 
+ *  @param lapl_id    IN: Link access property list identifier 
+ *
+ *  @return a String for the link name.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - group_name is null.
+ **/
+public synchronized static native String H5Lget_name_by_idx(int loc_id, String group_name,
+        int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lget_val returns the link value of a symbolic link.
+ *
+ *  @param loc_id      IN: Identifier of the file or group containing the object.
+ *  @param name        IN: Name of the symbolic link.
+ *  @param link_value OUT: Path of the symbolic link, or the file_name and path of an external file.
+ *  @param lapl_id     IN: Link access property list identifier
+ *
+ *  @return the link type
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native int H5Lget_val(int loc_id, String name, String[] link_value, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
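+
+/*
+ * Usage sketch, assuming "/data/raw_soft" is a soft link in the open file
+ * file_id; the array size of 2 leaves room for the two strings an external
+ * link would return.
+ *
+ * <pre>
+ * String[] value = new String[2];
+ * H5.H5Lget_val(file_id, "/data/raw_soft", value, HDF5Constants.H5P_DEFAULT);
+ * // value[0] now holds the path that the soft link points to
+ * </pre>
+ */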
+
+/**
+ *  H5Lget_val_by_idx retrieves the value of the nth link in a group, according to the order within an index.
+ *
+ *  @param loc_id     IN: File or group identifier specifying location of subject group
+ *  @param group_name IN: Name of subject group
+ *  @param idx_type   IN: Type of index
+ *  @param order      IN: Order within field or index
+ *  @param n          IN: Link for which to retrieve information 
+ *  @param link_value OUT: Path of the symbolic link, or the file_name and path of an external file.
+ *  @param lapl_id    IN: Link access property list identifier 
+ *
+ *  @return the link type
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - group_name is null.
+ **/
+public synchronized static native int H5Lget_val_by_idx(int loc_id, String group_name,
+        int idx_type, int order, long n, String[] link_value, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Literate iterates through links in a group. 
+*
+*  @param grp_id     IN: Identifier specifying subject group
+*  @param idx_type   IN: Type of index  
+*  @param order      IN: Order of iteration within index 
+*  @param idx        IN: Iteration position at which to start  
+*  @param op         IN: Callback function passing data regarding the link to the calling application  
+*  @param op_data    IN: User-defined pointer to data required by the application for its processing of the link 
+*
+*  @return returns the return value of the first operator that returns a positive value, or zero if all members were 
+*      processed with no operator returning non-zero.
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+**/
+public synchronized static native int H5Literate(int grp_id, 
+        int idx_type, int order,
+        long idx, H5L_iterate_cb op, H5L_iterate_t op_data)
+        throws HDF5LibraryException;
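+
+/*
+ * Usage sketch, assuming grp_id is an open group identifier and that
+ * H5L_iterate_cb declares a single callback(int, String, H5L_info_t,
+ * H5L_iterate_t) method, as in the ncsa.hdf.hdf5lib.callbacks package:
+ * print every link name in name order.
+ *
+ * <pre>
+ * H5L_iterate_cb printName = new H5L_iterate_cb() {
+ *     public int callback(int group, String name, H5L_info_t info,
+ *             H5L_iterate_t op_data) {
+ *         System.out.println(name);
+ *         return 0; // zero means: continue the iteration
+ *     }
+ * };
+ * H5.H5Literate(grp_id, HDF5Constants.H5_INDEX_NAME,
+ *         HDF5Constants.H5_ITER_INC, 0L, printName, null);
+ * </pre>
+ */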
+
+/**
+*  H5Literate_by_name iterates through links in a group. 
+*
+*  @param grp_id     IN: Identifier specifying subject group
+*  @param group_name IN: Name of subject group
+*  @param idx_type   IN: Type of index  
+*  @param order      IN: Order of iteration within index 
+*  @param idx        IN: Iteration position at which to start  
+*  @param op         IN: Callback function passing data regarding the link to the calling application  
+*  @param op_data    IN: User-defined pointer to data required by the application for its processing of the link 
+*  @param lapl_id    IN: Link access property list identifier 
+*
+*  @return returns the return value of the first operator that returns a positive value, or zero if all members were 
+*    processed with no operator returning non-zero.
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - group_name is null.
+**/
+public synchronized static native int H5Literate_by_name(int grp_id, String group_name,
+        int idx_type, int order, long idx,
+        H5L_iterate_cb op, H5L_iterate_t op_data, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Lmove renames a link within an HDF5 file.
+ *
+ *  @param src_loc   IN: Original file or group identifier.
+ *  @param src_name  IN: Original link name.
+ *  @param dst_loc   IN: Destination file or group identifier.
+ *  @param dst_name  IN: New link name.
+ *  @param lcpl_id   IN: Link creation property list identifier to be associated with the new link.
+ *  @param lapl_id   IN: Link access property list identifier to be associated with the new link.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native void H5Lmove(int src_loc, String src_name, int dst_loc,
+        String dst_name, int lcpl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
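+
+/**
+ *  Illustrative sketch only, not part of the upstream API: renames a link in
+ *  place with H5Lmove, using the default property lists. "fileId" is a
+ *  hypothetical identifier of an already open file.
+ **/
+public static void renameLinkExample(int fileId, String oldPath, String newPath)
+        throws HDF5LibraryException, NullPointerException
+{
+    // same source and destination location, so only the link name changes
+    H5Lmove(fileId, oldPath, fileId, newPath,
+            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+}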
+
+/**
+ *  H5Lvisit recursively visits all links starting from a specified group.
+ *
+ *  @param grp_id     IN: Identifier specifying subject group
+ *  @param idx_type   IN: Type of index  
+ *  @param order      IN: Order of iteration within index 
+ *  @param op         IN: Callback function passing data regarding the link to the calling application  
+ *  @param op_data    IN: User-defined pointer to data required by the application for its processing of the link 
+ *
+ *  @return the return value of the first operator that returns a positive value, or zero if all members were 
+ *      processed with no operator returning non-zero.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Lvisit(int grp_id, int idx_type, int order,
+         H5L_iterate_cb op, H5L_iterate_t op_data)
+         throws HDF5LibraryException;
+
+/**
+ *  H5Lvisit_by_name recursively visits all links starting from a specified group. 
+ *
+ *  @param loc_id     IN: Identifier specifying subject group
+ *  @param group_name IN: Name of subject group
+ *  @param idx_type   IN: Type of index  
+ *  @param order      IN: Order of iteration within index 
+ *  @param op         IN: Callback function passing data regarding the link to the calling application  
+ *  @param op_data    IN: User-defined pointer to data required by the application for its processing of the link 
+ *
+ *  @return the return value of the first operator that returns a positive value, or zero if all members were 
+ *      processed with no operator returning non-zero.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - group_name is null.
+ **/
+ public synchronized static native int H5Lvisit_by_name(int loc_id, String group_name,
+         int idx_type, int order, H5L_iterate_cb op,
+         H5L_iterate_t op_data, int lapl_id)
+         throws HDF5LibraryException, NullPointerException;
+
+///////// unimplemented ////////
+//herr_t H5Lcreate_ud(hid_t link_loc_id, const char *link_name,
+//		    H5L_type_t link_type, const void *udata, size_t udata_size, hid_t lcpl_id,
+//		    hid_t lapl_id);
+
+//htri_t H5Lis_registered(H5L_type_t id);
+
+//herr_t H5Lregister(const H5L_class_t *cls);
+
+//herr_t H5Lunpack_elink_val(const void *ext_linkval/*in*/, size_t link_size,
+//         unsigned *flags, const char **filename/*out*/, const char **obj_path /*out*/);
+
+//herr_t H5Lunregister(H5L_type_t id);
+
+
+//////////////////////////////////////////////////////////////
+//                                                          //
+// H5O: HDF5 1.8 Object Interface API Functions             //
+//                                                          //
+//////////////////////////////////////////////////////////////
+
+/**
+*  H5Oclose closes the group, dataset, or named datatype specified.
+*
+*  @param object_id  IN: Object identifier 
+*
+*  @return non-negative on success
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+**/
+public static int H5Oclose(int object_id) throws HDF5LibraryException
+{
+    if (object_id < 0)
+        return 0; // throw new HDF5LibraryException("Negative ID");
+
+    OPEN_IDS.removeElement(object_id);
+    return _H5Oclose(object_id);
+}
+
+private synchronized static native int _H5Oclose(int object_id)
+        throws HDF5LibraryException;
+
+
+
+/**
+*  H5Ocopy copies the group, dataset or named datatype at the source location 
+*  to the destination location. 
+*
+*  @param src_loc_id  IN: Object identifier indicating the location of the source object to be copied 
+*  @param src_name    IN: Name of the source object to be copied
+*  @param dst_loc_id  IN: Location identifier specifying the destination  
+*  @param dst_name    IN: Name to be assigned to the new copy 
+*  @param ocpypl_id   IN: Object copy property list  
+*  @param lcpl_id     IN: Link creation property list for the new hard link  
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native void H5Ocopy(int src_loc_id, String src_name, int dst_loc_id,
+        String dst_name, int ocpypl_id, int lcpl_id)
+        throws HDF5LibraryException, NullPointerException;
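+
+/**
+ *  Illustrative sketch only, not part of the upstream API: copies an object
+ *  under the same path from one open file to another with H5Ocopy, using the
+ *  default copy and link creation property lists. "srcFileId" and "dstFileId"
+ *  are hypothetical identifiers of already open files.
+ **/
+public static void copyObjectExample(int srcFileId, int dstFileId, String path)
+        throws HDF5LibraryException, NullPointerException
+{
+    H5Ocopy(srcFileId, path, dstFileId, path,
+            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+}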
+
+/**
+*  H5Oget_comment retrieves the comment for the specified object.
+*
+*  @param obj_id  IN: File or group identifier 
+*
+*  @return the comment
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+**/
+public synchronized static native String H5Oget_comment(int obj_id)
+        throws HDF5LibraryException;
+
+/**
+*  H5Oset_comment sets the comment for the specified object.
+*
+*  @param obj_id  IN: Identifier of the target object
+*  @param comment IN: The new comment.
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+* 
+* @deprecated As of HDF5 1.8 in favor of object attributes. 
+**/
+@Deprecated
+public synchronized static native void H5Oset_comment(int obj_id, String comment)
+        throws HDF5LibraryException;
+
+/**
+*  H5Oget_comment_by_name retrieves the comment for an object.
+*
+*  @param loc_id  IN: Identifier of a file, group, dataset, or named datatype.
+*  @param name    IN: Relative name of the object whose comment is to be set or reset.
+*  @param lapl_id IN: Link access property list identifier. 
+*
+*  @return the comment
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native String H5Oget_comment_by_name(int loc_id, String name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+//long H5Oget_comment_by_name(int loc_id, String name, String comment, long bufsize, int lapl_id);
+
+/**
+*  H5Oset_comment_by_name sets the comment for the specified object.
+*
+*  @param loc_id  IN: Identifier of a file, group, dataset, or named datatype.
+*  @param name    IN: Relative name of the object whose comment is to be set or reset.
+*  @param comment IN: The new comment.
+*  @param lapl_id IN: Link access property list identifier. 
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+* 
+* @deprecated As of HDF5 1.8 in favor of object attributes. 
+**/
+@Deprecated
+public synchronized static native void H5Oset_comment_by_name(int loc_id, String name,
+        String comment, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Oget_info retrieves the metadata for an object specified by an identifier. 
+ *
+ *  @param loc_id  IN: Identifier for target object 
+ *
+ *  @return object information
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native H5O_info_t H5Oget_info(int loc_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Oget_info_by_idx retrieves the metadata for an object, identifying the object by an index position. 
+*
+*  @param loc_id     IN: File or group identifier 
+*  @param group_name IN: Name of group, relative to loc_id, in which object is located
+*  @param idx_type   IN: Type of index by which objects are ordered  
+*  @param order      IN: Order of iteration within index 
+*  @param n          IN: Object to open 
+*  @param lapl_id    IN: Access property list identifier for the link pointing to the object (Not currently used; pass as H5P_DEFAULT.)
+*
+*  @return object information
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native H5O_info_t H5Oget_info_by_idx(int loc_id, String group_name,
+        int idx_type, int order, long n, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Oget_info_by_name retrieves the metadata for an object, identifying the object by location and relative name. 
+*
+*  @param loc_id  IN: File or group identifier specifying location of group in which object is located
+*  @param name    IN: Relative name of group
+*  @param lapl_id IN: Access property list identifier for the link pointing to the object (Not currently used; pass as H5P_DEFAULT.)
+*
+*  @return  object information
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native H5O_info_t H5Oget_info_by_name(int loc_id, String name, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Olink creates a new hard link to an object in an HDF5 file. 
+*
+*  @param obj_id      IN: Object to be linked.
+*  @param new_loc_id  IN: File or group identifier specifying location at which object is to be linked. 
+*  @param new_name    IN: Relative name of link to be created.
+*  @param lcpl_id     IN: Link creation property list identifier. 
+*  @param lapl_id     IN: Access property list identifier.
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native void H5Olink(int obj_id, int new_loc_id, String new_name,
+        int lcpl_id, int lapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Oopen opens a group, dataset, or named datatype specified by a location and a path name.
+*
+*  @param loc_id  IN: File or group identifier 
+*  @param name    IN: Relative path to the object
+*  @param lapl_id IN: Access property list identifier for the link pointing to the object 
+*
+*  @return an object identifier for the opened object
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public static int H5Oopen(int loc_id, String name, int lapl_id) 
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Oopen(loc_id, name, lapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Oopen(int loc_id, String name,
+        int lapl_id) throws HDF5LibraryException, NullPointerException;
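+
+/**
+ *  Illustrative sketch only, not part of the upstream API: opens an object by
+ *  path, reads its metadata with H5Oget_info and closes it again. Pairing
+ *  H5Oopen with H5Oclose in a finally block keeps the OPEN_IDS bookkeeping
+ *  consistent. "locId" is a hypothetical file or group identifier.
+ **/
+public static H5O_info_t objectInfoExample(int locId, String path)
+        throws HDF5LibraryException, NullPointerException
+{
+    int objId = H5Oopen(locId, path, HDF5Constants.H5P_DEFAULT);
+    try {
+        return H5Oget_info(objId);
+    }
+    finally {
+        H5Oclose(objId);
+    }
+}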
+
+
+/**
+*  H5Ovisit recursively visits all objects accessible from a specified object. 
+*
+*  @param obj_id     IN: Identifier of the object at which the recursive iteration begins.  
+*  @param idx_type   IN: Type of index  
+*  @param order      IN: Order of iteration within index 
+*  @param op         IN: Callback function passing data regarding the object to the calling application  
+*  @param op_data    IN: User-defined pointer to data required by the application for its processing of the object 
+*
+*  @return the return value of the first operator that returns a positive value, or zero if all members were 
+*      processed with no operator returning non-zero.
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native int H5Ovisit(int obj_id, int idx_type, int order,
+      H5O_iterate_cb op, H5O_iterate_t op_data)
+      throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Ovisit_by_name recursively visits all objects starting from a specified object.
+*
+*  @param loc_id    IN: File or group identifier 
+*  @param obj_name  IN: Relative path to the object
+*  @param idx_type  IN: Type of index  
+*  @param order     IN: Order of iteration within index 
+*  @param op        IN: Callback function passing data regarding the object to the calling application  
+*  @param op_data   IN: User-defined pointer to data required by the application for its processing of the object 
+*  @param lapl_id   IN: Link access property list identifier
+*
+*  @return the return value of the first operator that returns a positive value, or zero if all members 
+*      were processed with no operator returning non-zero.
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public synchronized static native int H5Ovisit_by_name(int loc_id, String obj_name,
+      int idx_type, int order, H5O_iterate_cb op,
+      H5O_iterate_t op_data, int lapl_id)
+      throws HDF5LibraryException, NullPointerException;
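+
+/**
+ *  Illustrative sketch only, not part of the upstream API: counts every object
+ *  reachable from a starting object using H5Ovisit. It assumes that the
+ *  H5O_iterate_cb callback interface declares callback(int, String, H5O_info_t,
+ *  H5O_iterate_t) as in the HDF-Java callbacks package; "objId" is a
+ *  hypothetical identifier of an already open object.
+ **/
+public static int countObjectsExample(int objId)
+        throws HDF5LibraryException, NullPointerException
+{
+    final int[] count = { 0 }; // one-element array so the callback can update it
+    H5O_iterate_cb counter = new H5O_iterate_cb() {
+        public int callback(int group, String name, H5O_info_t info, H5O_iterate_t op_data)
+        {
+            count[0]++;
+            return 0; // keep visiting
+        }
+    };
+    H5Ovisit(objId, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, counter, null);
+    return count[0];
+}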
+
+///////// unimplemented ////////
+
+///**
+//*  H5Odecr_refcount decrements the hard link reference count for an object.
+//*
+//*  @param object_id  IN: Object identifier 
+//*
+//*  @return none
+//*
+//*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+//**/
+//public synchronized static native void H5Odecr_refcount(int object_id)
+//    throws HDF5LibraryException;
+
+///**
+//*  H5Oincr_refcount increments the hard link reference count for an object.
+//*
+//*  @param object_id  IN: Object identifier 
+//*
+//*  @return none
+//*
+//*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+//**/
+//public synchronized static native void H5Oincr_refcount(int object_id)
+//    throws HDF5LibraryException;
+
+///**
+//*  H5Oopen_by_addr opens a group, dataset, or named datatype using its address within an HDF5 file.
+//*
+//*  @param loc_id  IN: File or group identifier 
+//*  @param addr    IN: Object's address in the file 
+//*
+//*  @return an object identifier for the opened object
+//*
+//*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+//**/
+//public synchronized static native int H5Oopen_by_addr(int loc_id, long addr)
+//      throws HDF5LibraryException;
+
+///**
+//*  H5Oopen_by_idx opens the nth object in the group specified.
+//*
+//*  @param loc_id     IN: File or group identifier 
+//*  @param group_name IN: Name of group, relative to loc_id, in which object is located
+//*  @param idx_type   IN: Type of index by which objects are ordered  
+//*  @param order      IN: Order of iteration within index 
+//*  @param n          IN: Object to open 
+//*  @param lapl_id    IN: Access property list identifier for the link pointing to the object 
+//*
+//*  @return an object identifier for the opened object
+//*
+//*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+//*  @exception NullPointerException - group_name is null.
+//**/
+//public synchronized static native int H5Oopen_by_idx(int loc_id, String group_name,
+//      H5_INDEX idx_type, H5_ITER order, long n, int lapl_id)
+//      throws HDF5LibraryException, NullPointerException;
+
+//////////////////////////////////////////////////////////////
+//                                                          //
+// H5P: Property List Interface Functions                   //
+//                                                          //
+//////////////////////////////////////////////////////////////
+
+public synchronized static native boolean H5Pall_filters_avail(int dcpl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pclose terminates access to a property list.
+ * 
+ * @param plist
+ *            IN: Identifier of the property list to terminate access to.
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Pclose(int plist) throws HDF5LibraryException
+{
+    if (plist < 0)
+    	return 0; // throw new HDF5LibraryException("Negative ID");;
+    
+    OPEN_IDS.removeElement(plist);
+    return _H5Pclose(plist);
+}
+
+private synchronized static native int _H5Pclose(int plist)
+        throws HDF5LibraryException;
+
+/**
+ * Closes an existing property list class
+ * 
+ * @param plid
+ *            IN: Property list class to close
+ * @return a non-negative value if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pclose_class(int plid)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pcopy copies an existing property list to create a new property list.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to duplicate.
+ * 
+ * @return a property list identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Pcopy(int plist) throws HDF5LibraryException
+{
+    int id = _H5Pcopy(plist);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Pcopy(int plist)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pcopy_prop copies a property from one property list or class to another
+ * 
+ * @param dst_id
+ *            IN: Identifier of the destination property list or class
+ * @param src_id
+ *            IN: Identifier of the source property list or class
+ * @param name
+ *            IN: Name of the property to copy
+ * @return a non-negative value if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pcopy_prop(int dst_id, int src_id,
+        String name) throws HDF5LibraryException;
+
+/**
+ * H5Pcreate creates a new property as an instance of some property list
+ * class.
+ * 
+ * @param type
+ *            IN: The type of property list to create.
+ * 
+ * @return a property list identifier (plist) if successful; otherwise Fail
+ *         (-1).
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Pcreate(int type) throws HDF5LibraryException
+{
+    int id = _H5Pcreate(type);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Pcreate(int type)
+        throws HDF5LibraryException;
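+
+/**
+ *  Illustrative sketch only, not part of the upstream API: the typical
+ *  create/configure/close life cycle of a property list. H5P_DATASET_CREATE
+ *  is assumed to be the dataset creation property list class constant from
+ *  HDF5Constants.
+ **/
+public static void propertyListLifeCycleExample() throws HDF5LibraryException
+{
+    int dcplId = H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+    try {
+        // ... configure the list here, e.g. with H5Pset_chunk or H5Pset_fill_value ...
+    }
+    finally {
+        H5Pclose(dcplId); // also removes the identifier from OPEN_IDS
+    }
+}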
+
+/**
+ * H5Pequal determines if two property lists or classes are equal
+ * 
+ * @param plid1
+ *            IN: First property object to be compared
+ * @param plid2
+ *            IN: Second property object to be compared
+ * @return positive value if equal; zero if unequal, a negative value if
+ *         failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pequal(int plid1, int plid2)
+        throws HDF5LibraryException;
+
+/**
+ * H5P_equal returns true if the two property lists or classes are equal.
+ * 
+ * @param plid1
+ *            IN: First property object to be compared
+ * @param plid2
+ *            IN: Second property object to be compared
+ * @return true if equal, false otherwise
+ * @throws HDF5LibraryException
+ */
+public static boolean H5P_equal(int plid1, int plid2)
+        throws HDF5LibraryException
+{
+    return H5Pequal(plid1, plid2) == 1;
+}
+
+/**
+ * H5Pexist determines whether a property exists within a property list or
+ * class
+ * 
+ * @param plid
+ *            IN: Identifier for the property to query
+ * @param name
+ *            IN: Name of property to check for
+ * @return a positive value if the property exists in the property object;
+ *         zero if the property does not exist; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pexist(int plid, String name)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Pfill_value_defined(int plist_id,
+        int[] status) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pget retrieves a copy of the value for a property in a property list
+ * (support integer only)
+ * 
+ * @param plid
+ *            IN: Identifier of property object to query
+ * @param name
+ *            IN: Name of property to query
+ * @return value for a property if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pget(int plid, String name)
+        throws HDF5LibraryException;
+
+/**
+ * Sets a property list value (support integer only)
+ * 
+ * @param plid
+ *            IN: Property list identifier to modify
+ * @param name
+ *            IN: Name of property to modify
+ * @param value
+ *            IN: value to set the property to
+ * @return a non-negative value if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pset(int plid, String name,
+        int value) throws HDF5LibraryException;
+
+/**
+ * H5Pget_alignment retrieves the current settings for alignment properties
+ * from a file access property list.
+ * 
+ * @param plist
+ *            IN: Identifier of a file access property list.
+ * @param alignment
+ *            OUT: threshold value and alignment value.
+ * 
+ *            <pre>
+ *      alignment[0] = threshold // threshold value
+ *      alignment[1] = alignment // alignment value
+ * </pre>
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - alignment array is null.
+ * @exception IllegalArgumentException
+ *                - alignment array is invalid.
+ **/
+public synchronized static native int H5Pget_alignment(int plist,
+        long[] alignment)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Pset_alignment sets the alignment properties of a file access property
+ * list so that any file object >= THRESHOLD bytes will be aligned on an
+ * address which is a multiple of ALIGNMENT.
+ * 
+ * @param plist
+ *            IN: Identifier for a file access property list.
+ * @param threshold
+ *            IN: Threshold value.
+ * @param alignment
+ *            IN: Alignment value.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_alignment(int plist,
+        long threshold, long alignment) throws HDF5LibraryException;
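+
+/**
+ *  Illustrative sketch only, not part of the upstream API: reads the current
+ *  alignment settings of a file access property list into the two-element
+ *  array documented above. "faplId" is a hypothetical identifier.
+ **/
+public static long[] alignmentExample(int faplId) throws HDF5LibraryException
+{
+    long[] alignment = new long[2]; // [0] = threshold, [1] = alignment
+    H5Pget_alignment(faplId, alignment);
+    return alignment;
+}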
+
+public synchronized static native int H5Pget_alloc_time(int plist_id,
+        int[] alloc_time) throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_alloc_time(int plist_id,
+        int alloc_time) throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pget_attr_creation_order retrieves the settings for tracking and indexing attribute creation order on an object  
+* @param ocpl_id            IN: Object (group or dataset) creation property list identifier
+* 
+* @return Flags specifying whether to track and index attribute creation order 
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pget_attr_creation_order(int ocpl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_attr_creation_order sets flags specifying whether to track and index attribute creation order on an object.  
+* @param ocpl_id                  IN: Object creation property list identifier
+* @param crt_order_flags          IN: Flags specifying whether to track and index attribute creation order
+* 
+* @return Returns a non-negative value if successful; otherwise returns a negative value. 
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pset_attr_creation_order(int ocpl_id, int crt_order_flags)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_attr_phase_change retrieves attribute storage phase change thresholds. 
+* @param ocpl_id      IN: Object (dataset or group) creation property list identifier 
+* @param attributes 
+*               The maximum and minimum number of attributes
+*               to be stored.
+*
+*      <pre>
+*      attributes[0] =  The maximum number of attributes to be stored in compact storage
+*      attributes[1] =  The minimum number of attributes to be stored in dense storage 
+*      </pre>
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - size is null.
+*  
+**/
+public synchronized static native int H5Pget_attr_phase_change(int ocpl_id, int []attributes) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pget_btree_ratios gets the B-tree split ratios for a dataset transfer
+ * property list.
+ * 
+ * @param plist_id
+ *            IN Dataset transfer property list
+ * @param left
+ *            OUT split ratio for leftmost nodes
+ * @param right
+ *            OUT split ratio for rightmost nodes
+ * @param middle
+ *            OUT split ratio for all other nodes
+ * 
+ * @return non-negative if succeed
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - an input array is null.
+ **/
+public synchronized static native int H5Pget_btree_ratios(int plist_id,
+        double[] left, double[] middle, double[] right)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pset_btree_ratios sets B-tree split ratios for a dataset transfer
+ * property list. The split ratios determine what percent of children go in
+ * the first node when a node splits.
+ * 
+ * @param plist_id
+ *            IN Dataset transfer property list
+ * @param left
+ *            IN split ratio for leftmost nodes
+ * @param right
+ *            IN split ratio for rightmost nodes
+ * @param middle
+ *            IN split ratio for all other nodes
+ * 
+ * @return non-negative if succeed
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_btree_ratios(int plist_id,
+        double left, double middle, double right)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_buffer gets the type conversion and background buffers. Returns
+ * buffer size, in bytes, if successful; otherwise 0 on failure.
+ * 
+ * @param plist
+ *            Identifier for the dataset transfer property list.
+ * @param tconv
+ *            byte array of application-allocated type conversion buffer.
+ * @param bkg
+ *            byte array of application-allocated background buffer.
+ * 
+ * @return buffer size, in bytes, if successful; otherwise 0 on failure
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ *                - plist is invalid.
+ **/
+public synchronized static native int H5Pget_buffer(int plist,
+        byte[] tconv, byte[] bkg)
+        throws HDF5LibraryException, IllegalArgumentException;
+public synchronized static native long H5Pget_buffer_size(int plist)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+ * H5Pset_buffer_size sets the size, in bytes, of the type conversion and
+ * background buffers.
+ * 
+ * Given a dataset transfer property list, H5Pset_buffer sets the maximum
+ * size for the type conversion buffer and background buffer. If the buffer
+ * size is smaller than the entire amount of data being transferred between
+ * the application and the file, and a type conversion buffer or background
+ * buffer is required, then strip mining will be used.
+ * 
+ * Note that there are minimum size requirements for the buffer. Strip
+ * mining can only break the data up along the first dimension, so the
+ * buffer must be large enough to accommodate a complete slice that
+ * encompasses all of the remaining dimensions. For example, when strip
+ * mining a 100x200x300 hyperslab of a simple data space, the buffer must be
+ * large enough to hold 1x200x300 data elements. When strip mining a
+ * 100x200x300x150 hyperslab of a simple data space, the buffer must be
+ * large enough to hold 1x200x300x150 data elements.
+ * 
+ * @param plist
+ *            Identifier for the dataset transfer property list.
+ * @param size
+ *            Size, in bytes, of the type conversion and background buffers.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ *                - plist is invalid.
+ **/
+public synchronized static native void H5Pset_buffer_size(int plist, long size)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+ * Retrieves the maximum possible number of elements in the meta data cache,
+ * the maximum possible number of bytes in the raw data chunk cache, and the
+ * RDCC_W0 preemption value of the raw data chunk cache.
+ * 
+ * @param plist       IN: Identifier of the file access property list.
+ * @param mdc_nelmts  IN/OUT: No longer used, will be ignored.
+ * @param rdcc_nelmts IN/OUT: Number of elements (objects) in the raw data chunk cache.
+ * @param rdcc_nbytes IN/OUT: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0     IN/OUT: Preemption policy.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - an array is null.
+ **/
+public synchronized static native int H5Pget_cache(int plist,
+        int[] mdc_nelmts, long[] rdcc_nelmts, long[] rdcc_nbytes,
+        double[] rdcc_w0) throws HDF5LibraryException, NullPointerException;
+/** 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Pget_cache(int, int[], long[], long[], double[]) }
+ *             because of possible loss of precision
+ **/ 
+@Deprecated
+public static int H5Pget_cache(int plist,
+        int[] mdc_nelmts, int[] rdcc_nelmts, int[] rdcc_nbytes,
+        double[] rdcc_w0) throws HDF5LibraryException, NullPointerException
+{
+    long[] rdcc_nelmts_l = {rdcc_nelmts[0]};
+    long[] rdcc_nbytes_l = {rdcc_nbytes[0]};
+    int retval = H5Pget_cache(plist, mdc_nelmts, rdcc_nelmts_l, rdcc_nbytes_l, rdcc_w0);
+    rdcc_nelmts[0] = (int)rdcc_nelmts_l[0];
+    rdcc_nbytes[0] = (int)rdcc_nbytes_l[0];
+    return retval;
+}
+
+/**
+ * H5Pset_cache sets the number of elements (objects) in the meta data cache
+ * and the total number of bytes in the raw data chunk cache.
+ * 
+ * @param plist       IN: Identifier of the file access property list.
+ * @param mdc_nelmts  IN: No longer used, will be ignored.
+ * @param rdcc_nelmts IN: Number of elements (objects) in the raw data chunk cache.
+ * @param rdcc_nbytes IN: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0     IN: Preemption policy.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_cache(int plist,
+        int mdc_nelmts, long rdcc_nelmts, long rdcc_nbytes, double rdcc_w0)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Pget_char_encoding(int plist_id)
+        throws HDF5LibraryException;
+public synchronized static native void H5Pset_char_encoding(int plist_id, int encoding) 
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_chunk retrieves the size of chunks for the raw data of a chunked
+ * layout dataset.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to query.
+ * @param max_ndims
+ *            IN: Size of the dims array.
+ * @param dims
+ *            OUT: Array to store the chunk dimensions.
+ * 
+ * @return chunk dimensionality successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - dims array is null.
+ * @exception IllegalArgumentException
+ *                - max_ndims <=0
+ **/
+public synchronized static native int H5Pget_chunk(int plist,
+        int max_ndims, long[] dims)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Pset_chunk sets the size of the chunks used to store a chunked layout
+ * dataset.
+ * 
+ * @param plist
+ *            IN: Identifier for property list to query.
+ * @param ndims
+ *            IN: The number of dimensions of each chunk.
+ * @param dim
+ *            IN: An array containing the size of each chunk.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - dims array is null.
+ * @exception IllegalArgumentException
+ *                - dims <=0
+ **/
+public synchronized static native int H5Pset_chunk(int plist, int ndims,
+        byte[] dim)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+public synchronized static int H5Pset_chunk(int plist, int ndims, long[] dim)
+        throws HDF5Exception, NullPointerException,
+        IllegalArgumentException
+{
+    if (dim == null) {
+        return -1;
+    }
+
+    HDFArray theArray = new HDFArray(dim);
+    byte[] thedims = theArray.byteify();
+
+    int retVal = H5Pset_chunk(plist, ndims, thedims);
+
+    thedims = null;
+    theArray = null;
+    return retVal;
+}
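+
+/**
+ *  Illustrative sketch only, not part of the upstream API: configures a
+ *  dataset creation property list for two-dimensional chunked storage. The
+ *  64x64 chunk size is an arbitrary example value, and H5P_DATASET_CREATE is
+ *  assumed to be the property list class constant from HDF5Constants.
+ **/
+public static int chunkedDcplExample() throws HDF5Exception
+{
+    int dcplId = H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+    long[] chunkDims = { 64, 64 }; // each chunk covers a 64x64 tile of the dataset
+    H5Pset_chunk(dcplId, 2, chunkDims);
+    return dcplId; // the caller is responsible for calling H5Pclose
+}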
+
+
+/**
+ * Retrieves the maximum possible number of elements, the maximum possible
+ * number of bytes and the RDCC_W0 value in the raw data chunk cache, on a
+ * per-dataset basis.
+ * 
+ * @param dapl_id     IN: Identifier of the dataset access property list.
+ * @param rdcc_nslots IN/OUT: Number of chunk slots in the raw data chunk cache hash table.
+ * @param rdcc_nbytes IN/OUT: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0     IN/OUT: Preemption policy.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - an array is null.
+ **/
+public synchronized static native void H5Pget_chunk_cache(int dapl_id,
+        long[] rdcc_nslots, long[] rdcc_nbytes, double[] rdcc_w0)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pset_chunk_cache sets the number of chunk slots and the total number of
+ * bytes in the raw data chunk cache on a per-dataset basis.
+ * 
+ * @param dapl_id     IN: Identifier of the dataset access property list.
+ * @param rdcc_nslots IN: Number of chunk slots in the raw data chunk cache hash table.
+ * @param rdcc_nbytes IN: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0     IN: Preemption policy.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Pset_chunk_cache(int dapl_id,
+        long rdcc_nslots, long rdcc_nbytes, double rdcc_w0)
+        throws HDF5LibraryException;
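+
+/**
+ *  Illustrative sketch only, not part of the upstream API: tunes the
+ *  per-dataset chunk cache on a dataset access property list. The values are
+ *  arbitrary examples: 521 chunk slots, a 4 MB cache, and a preemption
+ *  policy of 0.75.
+ **/
+public static void chunkCacheExample(int daplId) throws HDF5LibraryException
+{
+    H5Pset_chunk_cache(daplId, 521L, 4L * 1024L * 1024L, 0.75);
+}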
+
+/**
+ * H5Pget_class returns the property list class for the property list
+ * identified by the plist parameter.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to query.
+ * @return a property list class if successful. Otherwise returns
+ *         H5P_NO_CLASS (-1).
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pget_class(int plist)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_class_name retrieves the name of a generic property list class
+ * 
+ * @param plid
+ *            IN: Identifier of property object to query
+ * @return name of a property list if successful; null if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native String H5Pget_class_name(int plid)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_class_parent retrieves an identifier for the parent class of a
+ * property class
+ * 
+ * @param plid
+ *            IN: Identifier of the property class to query
+ * @return a valid parent class object identifier if successful; a negative
+ *         value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pget_class_parent(int plid)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_copy_object retrieves the properties to be used when an object is copied.  
+* @param ocp_plist_id            IN: Object copy property list identifier
+*  
+* @return Copy option(s) set in the object copy property list  
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pget_copy_object(int ocp_plist_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_copy_object Sets properties to be used when an object is copied.  
+* @param ocp_plist_id          IN: Object copy property list identifier
+* @param copy_options          IN: Copy option(s) to be set
+*  
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native void H5Pset_copy_object(int ocp_plist_id, int copy_options)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_create_intermediate_group determines whether the property is set to enable creating missing intermediate groups.  
+* @param lcpl_id                IN:  Link creation property list identifier
+*  
+* @return true if the property is set to create missing intermediate groups, false otherwise 
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native boolean H5Pget_create_intermediate_group(int lcpl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_create_intermediate_group specifies in the property list whether to create missing intermediate groups. 
+* @param lcpl_id               IN: Link creation property list identifier
+* @param crt_intermed_group    IN: Flag specifying whether to create intermediate groups upon the creation of an object 
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pset_create_intermediate_group(int lcpl_id, boolean crt_intermed_group)
+        throws HDF5LibraryException;
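+
+/**
+ *  Illustrative sketch only, not part of the upstream API: prepares a link
+ *  creation property list that makes the library create missing intermediate
+ *  groups, so that creating "/a/b/c" also creates "/a" and "/a/b".
+ *  H5P_LINK_CREATE is assumed to be the link creation property list class
+ *  constant from HDF5Constants.
+ **/
+public static int intermediateGroupLcplExample() throws HDF5LibraryException
+{
+    int lcplId = H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+    H5Pset_create_intermediate_group(lcplId, true);
+    return lcplId; // the caller is responsible for calling H5Pclose
+}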
+
+/**
+* H5Pget_data_transform retrieves the data transform expression previously set in the dataset transfer property list plist_id by H5Pset_data_transform.
+* @param plist_id               IN: Identifier of the property list or class
+* @param size                   IN: Maximum number of bytes of the transform expression to return
+* @param expression            OUT: A data transform expression
+* 
+* @return The size of the transform expression if successful; 0 (zero) if no transform expression exists. Otherwise returns a negative value. 
+* 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Size is <= 0.
+*  
+**/
+public synchronized static native long H5Pget_data_transform( int plist_id, String[] expression, long size)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+* H5Pset_data_transform sets a data transform expression 
+* @param plist_id              IN: Identifier of the property list or class
+* @param expression            IN: The data transform expression 
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - expression is null.
+*  
+**/
+public synchronized static native int H5Pset_data_transform(int plist_id, String expression)
+        throws HDF5LibraryException, NullPointerException;
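+
+/**
+ *  Illustrative sketch only, not part of the upstream API: installs a data
+ *  transform expression on a dataset transfer property list so that every
+ *  element is converted during I/O; "x" denotes the element value in the
+ *  HDF5 transform language. H5P_DATASET_XFER is assumed to be the dataset
+ *  transfer property list class constant from HDF5Constants.
+ **/
+public static int scalingDxplExample() throws HDF5LibraryException
+{
+    int dxplId = H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+    H5Pset_data_transform(dxplId, "(x + 273.15)"); // e.g. degrees Celsius to Kelvin
+    return dxplId; // the caller is responsible for calling H5Pclose
+}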
+
+/**
+ * H5Pget_driver returns the identifier of the low-level file driver 
+ * associated with the file access property list or data transfer 
+ * property list plid.
+ * 
+ * @param plid
+ *            IN: File access or data transfer property list identifier.
+ * @return a valid low-level driver identifier if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pget_driver(int plid)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Pget_edc_check(int plist)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_edc_check(int plist, int check)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pget_elink_acc_flags retrieves the external link traversal file access flag from the specified link access property list.
+* @param lapl_id                IN: Link access property list identifier 
+* 
+* @return File access flag for link traversal.  
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pget_elink_acc_flags(int lapl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_elink_acc_flags Sets the external link traversal file access flag in a link access property list. 
+* @param lapl_id                   IN: Link access property list identifier
+* @param flags                     IN: The access flag for external link traversal.  
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception IllegalArgumentException - Invalid Flag values.
+*  
+**/
+public synchronized static native int H5Pset_elink_acc_flags(int lapl_id, int flags)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+* H5Pget_elink_fapl Retrieves the file access property list identifier associated with 
+* the link access property list.   
+* 
+* @param lapl_id                IN: Link access property list identifier
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public static int H5Pget_elink_fapl(int lapl_id)
+        throws HDF5LibraryException
+{
+    int id = _H5Pget_elink_fapl(lapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Pget_elink_fapl(int lapl_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pset_elink_fapl sets a file access property list for use in accessing a 
+ * file pointed to by an external link.  
+ * 
+ * @param lapl_id                IN: Link access property list identifier
+ * @param fapl_id                IN: File access property list identifier
+ *  
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  
+ **/
+public synchronized static native int H5Pset_elink_fapl(int lapl_id, int fapl_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_elink_file_cache_size retrieves the size of the external link open file cache. 
+ * @param fapl_id                 IN: File access property list identifier
+ *  
+ * @return External link open file cache size in number of files. 
+ *  
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  
+ **/
+public synchronized static native int H5Pget_elink_file_cache_size(int fapl_id)
+        throws HDF5LibraryException; 
+
+/**
+ * H5Pset_elink_file_cache_size sets the number of files that can be held open in an external link open file cache. 
+ * @param fapl_id                 IN: File access property list identifier
+ * @param efc_size                IN: External link open file cache size in number of files. 
+ *  
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  
+ **/
+public synchronized static native void H5Pset_elink_file_cache_size(int fapl_id, int efc_size) 
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_elink_prefix Retrieves prefix applied to external link paths.
+* @param lapl_id                IN: Link access property list identifier
+* @param prefix                OUT: Prefix applied to external link paths
+* 
+* @return If successful, returns a non-negative value specifying the size in bytes of the prefix without the NULL terminator; 
+*         otherwise returns a negative value.  
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - prefix is null.
+*  
+**/
+public synchronized static native long H5Pget_elink_prefix(int lapl_id, String[] prefix)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pset_elink_prefix Sets prefix to be applied to external link paths.   
+* @param lapl_id                IN: Link access property list identifier
+* @param prefix                     IN: Prefix to be applied to external link paths
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - prefix is null.
+*  
+**/
+public synchronized static native int H5Pset_elink_prefix(int lapl_id, String prefix)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pget_est_link_info Queries the data required to estimate the local heap or object header size. 
+* @param gcpl_id                IN: Group creation property list identifier 
+* @param link_info
+*               The estimated number of links to be inserted into the group
+*               and the estimated average length of link names
+*
+*      <pre>
+*      link_info[0] =  Estimated number of links to be inserted into group
+*      link_info[1] =  Estimated average length of link names   
+*      </pre>
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - link_info is null.
+*  
+**/
+public synchronized static native int H5Pget_est_link_info(int gcpl_id, int []link_info) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pset_est_link_info Sets estimated number of links and length of link names in a group.  
+* @param gcpl_id                IN: Group creation property list identifier
+* @param est_num_entries        IN: Estimated number of links to be inserted into group
+* @param est_name_len           IN: Estimated average length of link names
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Invalid values to est_num_entries and est_name_len.
+*  
+**/
+public synchronized static native int H5Pset_est_link_info(int gcpl_id, int est_num_entries, int est_name_len)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+ * H5Pget_external returns information about an external file.
+ * 
+ * @param plist
+ *            IN: Identifier of a dataset creation property list.
+ * @param idx
+ *            IN: External file index.
+ * @param name_size
+ *            IN: Maximum length of name array.
+ * @param name
+ *            OUT: Name of the external file.
+ * @param size
+ *            OUT: the offset value and the size of the external file data.
+ * 
+ *            <pre>
+ *      size[0] = offset // a location to return an offset value
+ *      size[1] = size // a location to return the size of
+ *                // the external file data.
+ * </pre>
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                Fatal error on Copyback
+ * @exception ArrayStoreException
+ *                Fatal error on Copyback
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name or size is null.
+ * @exception IllegalArgumentException
+ *                - name_size <= 0 .
+ * 
+ **/
+public synchronized static native int H5Pget_external(int plist, int idx,
+        long name_size, String[] name, long[] size)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+        HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Pset_external adds an external file to the list of external files.
+ * 
+ * @param plist
+ *            IN: Identifier of a dataset creation property list.
+ * @param name
+ *            IN: Name of an external file.
+ * @param offset
+ *            IN: Offset, in bytes, from the beginning of the file to the
+ *            location in the file where the data starts.
+ * @param size
+ *            IN: Number of bytes reserved in the file for the data.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - name is null.
+ **/
+public synchronized static native int H5Pset_external(int plist,
+        String name, long offset, long size)
+        throws HDF5LibraryException, NullPointerException;
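+
+/**
+ *  Illustrative sketch only, not part of the upstream API: stores the raw data
+ *  of a dataset in a separate external file, starting at offset 0 and
+ *  reserving "size" bytes. The file name is an arbitrary example.
+ **/
+public static void externalStorageExample(int dcplId, long size)
+        throws HDF5LibraryException, NullPointerException
+{
+    H5Pset_external(dcplId, "raw_data.bin", 0L, size);
+}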
+
+/**
+ * H5Pget_external_count returns the number of external files for the
+ * specified dataset.
+ * 
+ * @param plist
+ *            IN: Identifier of a dataset creation property list.
+ * 
+ * @return the number of external files if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pget_external_count(int plist)
+        throws HDF5LibraryException;
+
+public synchronized static native long H5Pget_family_offset(int fapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_family_offset(int fapl_id,
+        long offset) throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native void H5Pget_fapl_core(int fapl_id,
+        long[] increment, boolean[] backing_store)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fapl_core(int fapl_id,
+        long increment, boolean backing_store)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pget_fapl_direct   Retrieves the direct I/O settings.
+* @param fapl_id                     IN: File access property list identifier 
+* @param info                       OUT: Returned property list information 
+*        info[0] = alignment   Required memory alignment boundary 
+*        info[1] = block_size  File system block size 
+*        info[2] = cbuf_size   Copy buffer size 
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pget_fapl_direct(int fapl_id, long[]info) throws HDF5LibraryException;
+
+/**
+* H5Pset_fapl_direct Sets up use of the direct I/O driver.   
+* @param fapl_id        IN: File access property list identifier 
+* @param alignment      IN: Required memory alignment boundary 
+* @param block_size     IN: File system block size 
+* @param cbuf_size      IN: Copy buffer size 
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pset_fapl_direct(int fapl_id, long alignment, long block_size, long cbuf_size)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Pget_fapl_family(int fapl_id,
+        long[] memb_size, int[] memb_fapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fapl_family(int fapl_id,
+        long memb_size, int memb_fapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+//herr_t H5Pget_fapl_mpio( int fapl_id, MPI_Comm *comm, MPI_Info *info ) 
+//herr_t H5Pset_fapl_mpio( int fapl_id, MPI_Comm comm, MPI_Info info ) 
+
+//herr_t H5Pget_fapl_mpiposix( int fapl_id, MPI_Comm *comm, hbool_t *use_gpfs_hints ) 
+//herr_t H5Pset_fapl_mpiposix( int fapl_id, MPI_Comm comm, hbool_t use_gpfs_hints ) 
+
+
+/**
+ * H5Pget_fapl_multi Retrieves information about the multi I/O driver.   
+ * @param fapl_id     IN: File access property list identifier 
+ * @param memb_map   OUT: Maps memory usage types to other memory usage types.
+ * @param memb_fapl  OUT: Property list for each memory usage type.
+ * @param memb_name  OUT: Name generator for names of member files.
+ * @param memb_addr  OUT: The offsets within the virtual address space, from 0 (zero) to HADDR_MAX, at which each type of data storage begins.
+ *  
+ * @return the relax flag: TRUE if read-only access to incomplete file sets is allowed.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - an array is null.
+ *  
+ **/
+public synchronized static native boolean H5Pget_fapl_multi(int fapl_id, int[] memb_map, 
+        int[] memb_fapl, String[] memb_name, long[] memb_addr) 
+        throws HDF5LibraryException, NullPointerException;
+
+
+/**
+ * H5Pset_fapl_multi Sets up use of the multi I/O driver.   
+ * @param fapl_id     IN: File access property list identifier 
+ * @param memb_map    IN: Maps memory usage types to other memory usage types.
+ * @param memb_fapl   IN: Property list for each memory usage type.
+ * @param memb_name   IN: Name generator for names of member files.
+ * @param memb_addr   IN: The offsets within the virtual address space, from 0 (zero) to HADDR_MAX, at which each type of data storage begins.
+ * @param relax       IN: Allows read-only access to incomplete file sets when TRUE.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - an array is null.
+ *  
+ **/
+public synchronized static native void H5Pset_fapl_multi(int fapl_id, int[] memb_map, 
+        int[] memb_fapl, String[] memb_name, long[] memb_addr, boolean relax) 
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pget_fclose_degree(int plist_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fclose_degree(int plist,
+        int degree) throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pget_fill_time(int plist_id,
+        int[] fill_time) throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fill_time(int plist_id,
+        int fill_time) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pget_fill_value queries the fill value property of a dataset creation
+ * property list.
+ * 
+ * @param plist_id
+ *            IN: Property list identifier.
+ * @param type_id
+ *            IN: The datatype identifier of value.
+ * @param value
+ *            IN: The fill value.
+ * 
+ * @return a non-negative value if successful
+ * 
+ **/
+public synchronized static native int H5Pget_fill_value(int plist_id,
+        int type_id, byte[] value) throws HDF5Exception;
+
+/**
+ * H5Pget_fill_value queries the fill value property of a dataset creation
+ * property list.
+ * 
+ * @param plist_id
+ *            IN: Property list identifier.
+ * @param type_id
+ *            IN: The datatype identifier of value.
+ * @param obj
+ *            IN: The fill value.
+ * 
+ * @return a non-negative value if successful
+ * 
+ **/
+public synchronized static int H5Pget_fill_value(int plist_id, int type_id,
+        Object obj) throws HDF5Exception
+{
+    HDFArray theArray = new HDFArray(obj);
+    byte[] buf = theArray.emptyBytes();
+
+    int status = H5Pget_fill_value(plist_id, type_id, buf);
+    if (status >= 0) {
+        obj = theArray.arrayify(buf);
+    }
+
+    return status;
+}
+
+/**
+ * H5Pset_fill_value sets the fill value for a dataset creation property
+ * list.
+ * 
+ * @param plist_id
+ *            IN: Property list identifier.
+ * @param type_id
+ *            IN: The datatype identifier of value.
+ * @param value
+ *            IN: The fill value.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5Exception
+ *                - Error converting data array
+ **/
+public synchronized static native int H5Pset_fill_value(int plist_id,
+        int type_id, byte[] value) throws HDF5Exception;
+
+/**
+ * H5Pset_fill_value sets the fill value for a dataset creation property
+ * list.
+ * 
+ * @param plist_id
+ *            IN: Property list identifier.
+ * @param type_id
+ *            IN: The datatype identifier of value.
+ * @param obj
+ *            IN: The fill value.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5Exception
+ *                - Error converting data array
+ **/
+public synchronized static int H5Pset_fill_value(int plist_id, int type_id,
+        Object obj) throws HDF5Exception
+{
+    HDFArray theArray = new HDFArray(obj);
+    byte[] buf = theArray.byteify();
+
+    int retVal = H5Pset_fill_value(plist_id, type_id, buf);
+
+    buf = null;
+    theArray = null;
+    return retVal;
+}
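+
+// Usage sketch (illustrative only; `dcpl` is an assumed dataset creation
+// property list id): set an int fill value of -1 via the Object overloads,
+// then read it back:
+//
+//     int[] fill = new int[] { -1 };
+//     H5Pset_fill_value(dcpl, HDF5Constants.H5T_NATIVE_INT, fill);
+//     int[] readBack = new int[1];
+//     H5Pget_fill_value(dcpl, HDF5Constants.H5T_NATIVE_INT, readBack);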
+
+/**
+ * H5Pget_filter returns information about a filter, specified by its filter
+ * number, in a filter pipeline, specified by the property list with which
+ * it is associated.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Pget_filter(int, int, int[], long[], int[], long, String[], int[]) }
+ * 
+ * @param plist
+ *            IN: Property list identifier.
+ * @param filter_number
+ *            IN: Sequence number within the filter pipeline of the filter
+ *            for which information is sought.
+ * @param flags
+ *            OUT: Bit vector specifying certain general properties of the
+ *            filter.
+ * @param cd_nelmts
+ *            IN/OUT: Number of elements in cd_values
+ * @param cd_values
+ *            OUT: Auxiliary data for the filter.
+ * @param namelen
+ *            IN: Anticipated number of characters in name.
+ * @param name
+ *            OUT: Name of the filter.
+ * 
+ * @return the filter identification number if successful. Otherwise returns
+ *         H5Z_FILTER_ERROR (-1).
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                Fatal error on Copyback
+ * @exception ArrayStoreException
+ *                Fatal error on Copyback
+ * @exception NullPointerException
+ *                - name or an array is null.
+ * 
+ **/
+@Deprecated
+public synchronized static native int H5Pget_filter(int plist, int filter_number, int[] flags, 
+        int[] cd_nelmts, int[] cd_values, int namelen, String[] name) 
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, 
+        NullPointerException;
+
+/**
+ * H5Pget_filter returns information about a filter, specified by its filter
+ * number, in a filter pipeline, specified by the property list with which
+ * it is associated.
+ * 
+ * @param plist             IN: Property list identifier.
+ * @param filter_number     IN: Sequence number within the filter pipeline of the filter
+ *                                 for which information is sought.
+ * @param flags            OUT: Bit vector specifying certain general properties of the
+ *                                 filter.
+ * @param cd_nelmts     IN/OUT: Number of elements in cd_values
+ * @param cd_values        OUT: Auxiliary data for the filter.
+ * @param namelen           IN: Anticipated number of characters in name.
+ * @param name             OUT: Name of the filter.
+ * @param filter_config    OUT: A bit field encoding the returned filter information 
+ * 
+ * @return the filter identification number if successful. Otherwise returns
+ *         H5Z_FILTER_ERROR (-1).
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                Fatal error on Copyback
+ * @exception ArrayStoreException
+ *                Fatal error on Copyback
+ * @exception NullPointerException
+ *                - name or an array is null.
+ * 
+ **/
+public static int H5Pget_filter(int plist, int filter_number, int[] flags, long[] cd_nelmts, 
+        int[] cd_values, long namelen, String[] name, int[] filter_config)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, 
+        NullPointerException
+{
+    return H5Pget_filter2(plist, filter_number, flags, cd_nelmts, cd_values, namelen, 
+            name, filter_config);
+}
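+
+// Usage sketch (illustrative only; `dcpl` is an assumed dataset creation
+// property list id, buffer sizes are arbitrary): walk the filter pipeline
+// and print each filter's name.
+//
+//     int nfilters = H5Pget_nfilters(dcpl);
+//     for (int i = 0; i < nfilters; i++) {
+//         int[] flags = new int[1];
+//         long[] nelmts = new long[] { 4 };
+//         int[] cd_values = new int[4];
+//         String[] name = new String[1];
+//         int[] config = new int[1];
+//         int filter = H5Pget_filter(dcpl, i, flags, nelmts, cd_values,
+//                 120, name, config);
+//         System.out.println(i + ": " + name[0] + " (filter id " + filter + ")");
+//     }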
+
+/**
+ * H5Pget_filter2 returns information about a filter, specified by its filter
+ * number, in a filter pipeline, specified by the property list with which
+ * it is associated.
+ * 
+ * @see #H5Pget_filter(int, int, int[], long[], int[], long, String[], int[])
+ * 
+ **/
+private synchronized static native int H5Pget_filter2(int plist, int filter_number, int[] flags, 
+        long[] cd_nelmts, int[] cd_values, long namelen, String[] name, int[] filter_config)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, 
+        NullPointerException;
+
+/**
+ * H5Pset_filter adds the specified filter and corresponding properties to
+ * the end of an output filter pipeline.
+ * 
+ * @param plist
+ *            IN: Property list identifier.
+ * @param filter
+ *            IN: Filter to be added to the pipeline.
+ * @param flags
+ *            IN: Bit vector specifying certain general properties of the
+ *            filter.
+ * @param cd_nelmts
+ *            IN: Number of elements in cd_values
+ * @param cd_values
+ *            IN: Auxiliary data for the filter.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_filter(int plist, int filter,
+        int flags, long cd_nelmts, int[] cd_values)
+        throws HDF5LibraryException;
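+
+// Usage sketch (illustrative only; `dcpl` is an assumed dataset creation
+// property list id): append the deflate filter explicitly, equivalent in
+// effect to H5Pset_deflate, with level 6 as its single auxiliary value:
+//
+//     H5Pset_filter(dcpl, HDF5Constants.H5Z_FILTER_DEFLATE,
+//             HDF5Constants.H5Z_FLAG_OPTIONAL, 1, new int[] { 6 });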
+
+/**
+ * H5Pget_filter_by_id returns information about the filter specified in filter_id, a 
+ * filter identifier. plist_id must be a dataset or group creation property list and 
+ * filter_id must be in the associated filter pipeline. The filter_id and flags parameters 
+ * are used in the same manner as described in the discussion of H5Pset_filter. Aside from 
+ * the fact that they are used for output, the parameters cd_nelmts and cd_values[] are 
+ * used in the same manner as described in the discussion of H5Pset_filter. On input, the 
+ * cd_nelmts parameter indicates the number of entries in the cd_values[] array allocated 
+ * by the calling program; on exit it contains the number of values defined by the filter.
+ * On input, the namelen parameter indicates the number of characters allocated for the 
+ * filter name by the calling program in the array name[]. On exit name[] contains the name 
+ * of the filter with one character of the name in each element of the array. If the filter 
+ * specified in filter_id is not set for the property list, an error will be returned and 
+ * H5Pget_filter_by_id1 will fail.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Pget_filter_by_id(int, int, int[], long[], int[], long, String[], int[]) }
+ * 
+ * @param plist_id          IN: Property list identifier.
+ * @param filter_id         IN: Filter identifier.
+ * @param flags            OUT: Bit vector specifying certain general properties of the
+ *                                 filter.
+ * @param cd_nelmts     IN/OUT: Number of elements in cd_values
+ * @param cd_values        OUT: Auxiliary data for the filter.
+ * @param namelen           IN: Anticipated number of characters in name.
+ * @param name             OUT: Name of the filter.
+ * 
+ * @return the filter identification number if successful. Otherwise returns
+ *         H5Z_FILTER_ERROR (-1).
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                Fatal error on Copyback
+ * @exception ArrayStoreException
+ *                Fatal error on Copyback
+ * @exception NullPointerException
+ *                - name or an array is null.
+ * 
+ **/
+@Deprecated
+public synchronized static native int H5Pget_filter_by_id(int plist_id,
+        int filter_id, int[] flags, long[] cd_nelmts, int[] cd_values,
+        long namelen, String[] name)
+        throws HDF5LibraryException, NullPointerException;
+/**
+ * H5Pget_filter_by_id returns information about the filter specified in filter_id, a 
+ * filter identifier. plist_id must be a dataset or group creation property list and 
+ * filter_id must be in the associated filter pipeline. The filter_id and flags parameters 
+ * are used in the same manner as described in the discussion of H5Pset_filter. Aside from 
+ * the fact that they are used for output, the parameters cd_nelmts and cd_values[] are 
+ * used in the same manner as described in the discussion of H5Pset_filter. On input, the 
+ * cd_nelmts parameter indicates the number of entries in the cd_values[] array allocated 
+ * by the calling program; on exit it contains the number of values defined by the filter.
+ * On input, the namelen parameter indicates the number of characters allocated for the 
+ * filter name by the calling program in the array name[]. On exit name[] contains the name 
+ * of the filter with one character of the name in each element of the array. If the filter 
+ * specified in filter_id is not set for the property list, an error will be returned and 
+ * H5Pget_filter_by_id1 will fail.
+ * 
+ * @param plist_id         IN: Property list identifier.
+ * @param filter_id        IN: Filter identifier.
+ * @param flags           OUT: Bit vector specifying certain general properties of the
+ *                                 filter.
+ * @param cd_nelmts    IN/OUT: Number of elements in cd_values
+ * @param cd_values       OUT: Auxiliary data for the filter.
+ * @param namelen          IN: Anticipated number of characters in name.
+ * @param name            OUT: Name of the filter.
+ * @param filter_config   OUT: A bit field encoding the returned filter information 
+ * 
+ * @return the filter identification number if successful. Otherwise returns
+ *         H5Z_FILTER_ERROR (-1).
+ * 
+ * @exception ArrayIndexOutOfBoundsException
+ *                Fatal error on Copyback
+ * @exception ArrayStoreException
+ *                Fatal error on Copyback
+ * @exception NullPointerException
+ *                - name or an array is null.
+ * 
+ **/
+public static int H5Pget_filter_by_id(int plist_id,
+        int filter_id, int[] flags, long[] cd_nelmts, int[] cd_values,
+        long namelen, String[] name, int[] filter_config)
+        throws ArrayIndexOutOfBoundsException, ArrayStoreException,
+        HDF5LibraryException, NullPointerException
+{
+    return H5Pget_filter_by_id2(plist_id, filter_id, flags, cd_nelmts, cd_values,
+            namelen, name, filter_config);
+}
+/**
+ * H5Pget_filter_by_id2 returns information about a filter, specified by its filter
+ * id, in a filter pipeline, specified by the property list with which
+ * it is associated.
+ * 
+ * @see #H5Pget_filter_by_id
+ **/
+public synchronized static native int H5Pget_filter_by_id2(int plist_id,
+        int filter_id, int[] flags, long[] cd_nelmts, int[] cd_values,
+        long namelen, String[] name, int[] filter_config)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pget_gc_references Returns the current setting for the garbage
+ * collection references property from a file access property list.
+ * <p>
+ * Note: this routine changed name with HDF5.1.2.2. If using an earlier
+ * version, use 'configure --enable-hdf5_1_2_1' so this routine will link to
+ * the old name.
+ * 
+ * @param fapl_id
+ *            IN File access property list
+ * @param gc_ref
+ *            OUT GC is on (true) or off (false)
+ * 
+ * @return non-negative if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - array is null.
+ **/
+public synchronized static native int H5Pget_gc_references(int fapl_id,
+        boolean[] gc_ref) throws HDF5LibraryException, NullPointerException;
+public synchronized static native boolean H5Pget_gcreferences(int fapl_id)
+        throws HDF5LibraryException;
+
+/*
+ * Earlier versions of the HDF5 library had a different name. This is
+ * included as an alias.
+ */
+public synchronized static int H5Pget_gc_reference(int fapl_id,
+        boolean[] gc_ref) throws HDF5LibraryException, NullPointerException
+{
+    return H5Pget_gc_references(fapl_id, gc_ref);
+}
+
+/**
+ * H5Pset_gc_references Sets the flag for garbage collecting references for
+ * the file. Default value for garbage collecting references is off.
+ * 
+ * @param fapl_id
+ *            IN File access property list
+ * @param gc_ref
+ *            IN set GC on (true) or off (false)
+ * 
+ * @return non-negative if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_gc_references(int fapl_id,
+        boolean gc_ref) throws HDF5LibraryException;
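+
+// Usage sketch (illustrative only; `fapl` is an assumed file access property
+// list id): turn garbage collection of references on and read the setting
+// back:
+//
+//     H5Pset_gc_references(fapl, true);
+//     boolean[] on = new boolean[1];
+//     H5Pget_gc_references(fapl, on);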
+
+public synchronized static native int H5Pget_hyper_vector_size(int dxpl_id,
+        long[] vector_size)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_hyper_vector_size(int dxpl_id,
+        long vector_size) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pget_istore_k queries the 1/2 rank of an indexed storage B-tree.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to query.
+ * @param ik
+ *            OUT: Pointer to location to return the chunked storage B-tree
+ *            1/2 rank.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - ik array is null.
+ **/
+public synchronized static native int H5Pget_istore_k(int plist, int[] ik)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pset_istore_k sets the size of the parameter used to control the
+ * B-trees for indexing chunked datasets.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to query.
+ * @param ik
+ *            IN: 1/2 rank of chunked storage B-tree.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_istore_k(int plist, int ik)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_layout returns the layout of the raw data for a dataset.
+ * 
+ * @param plist
+ *            IN: Identifier for property list to query.
+ * 
+ * @return the layout type of a dataset creation property list if
+ *         successful. Otherwise returns H5D_LAYOUT_ERROR (-1).
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pget_layout(int plist)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pset_layout sets the type of storage used to store the raw data for a
+ * dataset.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to query.
+ * @param layout
+ *            IN: Type of storage layout for raw data.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_layout(int plist, int layout)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_libver_bounds retrieves the lower and upper bounds on the HDF5 Library versions that indirectly determine the object formats versions used when creating objects in the file.
+* @param fapl_id     IN: File access property list identifier
+* @param libver 
+*              The earliest/latest version of the library that will be used for writing objects.
+*
+*      <pre>
+*      libver[0] =  The earliest version of the library that will be used for writing objects
+*      libver[1] =  The latest version of the library that will be used for writing objects.
+*      </pre>
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - size is null.
+*  
+**/
+public synchronized static native int H5Pget_libver_bounds(int fapl_id, int []libver) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pset_libver_bounds Sets bounds on library versions, and indirectly format versions, to be used when creating objects
+* @param fapl_id   IN: File access property list identifier
+* @param low       IN: The earliest version of the library that will be used for writing objects
+* @param high      IN: The latest version of the library that will be used for writing objects.
+* 
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception IllegalArgumentException - Argument is Illegal
+*  
+**/
+public synchronized static native int H5Pset_libver_bounds(int fapl_id, int low, int high)
+        throws HDF5LibraryException, IllegalArgumentException;
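+
+// Usage sketch (illustrative only; `fapl` is an assumed file access property
+// list id): allow the library to use the latest object format versions when
+// writing:
+//
+//     H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
+//             HDF5Constants.H5F_LIBVER_LATEST);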
+
+/**
+* H5Pget_link_creation_order queries the group creation property list, gcpl_id, and returns a flag indicating whether link creation order is tracked and/or indexed in a group.  
+* @param gcpl_id      IN: Group creation property list identifier
+* 
+* @return crt_order_flags - Creation order flag(s)
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pget_link_creation_order(int gcpl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_link_creation_order Sets flags in a group creation property list, gcpl_id, for tracking and/or indexing links on creation order. 
+* @param gcpl_id                  IN: Group creation property list identifier
+* @param crt_order_flags          IN: Creation order flag(s)
+* 
+* 
+* @return Returns a non-negative value if successful; otherwise returns a negative value. 
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pset_link_creation_order(int gcpl_id, int crt_order_flags)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_link_phase_change Queries the settings for conversion between compact and dense groups.
+* @param gcpl_id      IN: Group creation property list identifier
+* @param links 
+*               The maximum number of compact links and the minimum
+*               number of dense links used for storing groups
+*
+*      <pre>
+*      links[0] =  The maximum number of links for compact storage
+*      links[1] =  The minimum number of links for dense storage
+*      </pre>
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - size is null.
+*  
+**/
+public synchronized static native int H5Pget_link_phase_change(int gcpl_id, int []links) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pset_link_phase_change Sets the parameters for conversion between compact and dense groups. 
+* @param gcpl_id                IN: Group creation property list identifier
+* @param max_compact            IN: Maximum number of links for compact storage(Default: 8) 
+* @param min_dense              IN: Minimum number of links for dense storage(Default: 6)
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception IllegalArgumentException - Invalid values of max_compact and min_dense.
+*  
+**/
+public synchronized static native int H5Pset_link_phase_change(int gcpl_id, int max_compact, int min_dense)
+        throws HDF5LibraryException, IllegalArgumentException;
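+
+// Usage sketch (illustrative only; `gcpl` is an assumed group creation
+// property list id): switch a group to dense storage once it holds more than
+// 16 links, and revert below 12 (max_compact must be >= min_dense):
+//
+//     H5Pset_link_phase_change(gcpl, 16, 12);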
+
+/**
+* H5Pget_local_heap_size_hint Retrieves the anticipated size of the local heap for original-style groups.  
+* @param gcpl_id                IN: Group creation property list identifier
+*  
+* @return size_hint, the anticipated size of local heap
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native long H5Pget_local_heap_size_hint(int gcpl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_local_heap_size_hint Specifies the anticipated maximum size of a local heap. 
+* @param gcpl_id              IN: Group creation property list identifier
+* @param size_hint            IN: Anticipated maximum size in bytes of local heap
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pset_local_heap_size_hint(int gcpl_id, long size_hint)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_mdc_config gets the initial metadata cache configuration contained in a 
+ * file access property list and loads it into the instance of H5AC_cache_config_t 
+ * pointed to by the config_ptr parameter. This configuration is used when the file is opened. 
+ * 
+ * @param plist_id            IN: Identifier of the file access property list. 
+ * 
+ * @return  A buffer (H5AC_cache_config_t) containing the current metadata cache configuration information 
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native H5AC_cache_config_t H5Pget_mdc_config(int plist_id) 
+        throws HDF5LibraryException;
+public synchronized static native void H5Pset_mdc_config(int plist_id, H5AC_cache_config_t config_ptr) 
+        throws HDF5LibraryException; 
+
+/**
+ * H5Pget_meta_block_size returns the current metadata block size setting.
+ * @param fapl_id                 IN: File access property list identifier
+ *  
+ * @return the minimum size, in bytes, of metadata block allocations.
+ *  
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  
+ **/
+public synchronized static native long H5Pget_meta_block_size(int fapl_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pset_meta_block_size sets the minimum metadata block size. 
+ * @param fapl_id             IN: File access property list identifier
+ * @param size                IN: Minimum size, in bytes, of metadata block allocations.
+ *  
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  
+ **/
+public synchronized static native void H5Pset_meta_block_size(int fapl_id, long size)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_nfilters returns the number of filters defined in the filter
+ * pipeline associated with the property list plist.
+ * 
+ * @param plist
+ *            IN: Property list identifier.
+ * 
+ * @return the number of filters in the pipeline if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pget_nfilters(int plist)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_nlinks retrieves the maximum number of soft or user-defined link traversals allowed, nlinks, before the library assumes it has found a cycle and aborts the traversal. This value is retrieved from the link access property list lapl_id.
+* @param lapl_id     IN: File access property list identifier
+* 
+* @return Returns the maximum number of links to traverse.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native long H5Pget_nlinks(int lapl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_nlinks sets the maximum number of soft or user-defined link traversals allowed, nlinks, before the library assumes it has found a cycle and aborts the traversal. This value is set in the link access property list lapl_id. 
+* @param lapl_id    IN: File access property list identifier
+* @param nlinks     IN: Maximum number of links to traverse
+* 
+* @return Returns a non-negative value if successful; otherwise returns a negative value. 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Argument is Illegal
+*  
+**/
+public synchronized static native int H5Pset_nlinks(int lapl_id, long nlinks)
+        throws HDF5LibraryException, IllegalArgumentException;
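+
+// Usage sketch (illustrative only; `lapl` is an assumed link access property
+// list id): raise the soft/user-defined link traversal limit from the
+// default (16) to 64:
+//
+//     H5Pset_nlinks(lapl, 64);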
+
+/**
+ * H5Pget_nprops retrieves the number of properties in a property list or
+ * class
+ * 
+ * @param plid
+ *            IN: Identifier of property object to query
+ * @return number of properties if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native long H5Pget_nprops(int plid)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_preserve checks the status of the dataset transfer property list.
+ * 
+ * @deprecated As of HDF5 1.8, compound datatype field preservation is now core functionality in the HDF5 Library.
+ * 
+ * @param plist
+ *            IN: Identifier for the dataset transfer property list.
+ * 
+ * @return TRUE or FALSE if successful; otherwise returns a negative value
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+@Deprecated
+public synchronized static native int H5Pget_preserve(int plist)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pset_preserve sets the dataset transfer property list status to TRUE or
+ * FALSE.
+ * 
+ * @deprecated As of HDF5 1.8, compound datatype field preservation is now core functionality in the HDF5 Library.
+ * 
+ * @param plist
+ *            IN: Identifier for the dataset transfer property list.
+ * @param status
+ *            IN: Status of for the dataset transfer property list.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ *                - plist is invalid.
+ **/
+@Deprecated
+public synchronized static native int H5Pset_preserve(int plist,
+        boolean status)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+ * H5Pget_obj_track_times queries the object creation property list, ocpl_id, 
+ * to determine whether object times are being recorded. 
+ * 
+ * @param ocpl_id   IN: Object creation property list identifier
+ * 
+ * @return TRUE or FALSE, specifying whether object times are being recorded
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * 
+ **/
+public synchronized static native boolean H5Pget_obj_track_times(int ocpl_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pset_obj_track_times sets a property in the object creation property list, ocpl_id, 
+ * that governs the recording of times associated with an object. 
+ * 
+ * @param ocpl_id     IN: Object creation property list identifier
+ * 
+ * @param track_times IN: TRUE or FALSE, specifying whether object times are to be tracked 
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * 
+ **/
+public synchronized static native void H5Pset_obj_track_times(int ocpl_id, boolean track_times)
+        throws HDF5LibraryException;
+
+/**
+* H5Pget_shared_mesg_index Retrieves the configuration settings for a shared message index.  
+* @param fcpl_id          IN: File creation property list identifier 
+* @param index_num        IN: Index being configured.
+* @param mesg_info
+*               The message type and minimum message size            
+*
+*      <pre>
+*      mesg_info[0] =  Types of messages that may be stored in this index.
+*      mesg_info[1] =  Minimum message size.  
+*      </pre>
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - mesg_info is null.
+* @exception IllegalArgumentException - Invalid value of nindexes
+*  
+**/
+public synchronized static native int H5Pget_shared_mesg_index(int fcpl_id, int index_num, int[] mesg_info) 
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+/**
+* H5Pset_shared_mesg_index Configures the specified shared object header message index 
+* @param fcpl_id                IN: File creation property list identifier.
+* @param index_num              IN: Index being configured.
+* @param mesg_type_flags        IN: Types of messages that should be stored in this index.
+* @param min_mesg_size          IN: Minimum message size.
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Invalid value of nindexes
+*  
+**/
+public synchronized static native int H5Pset_shared_mesg_index(int fcpl_id, int index_num, int mesg_type_flags, int min_mesg_size)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+/**
+* H5Pget_shared_mesg_nindexes retrieves number of shared object header message indexes in file creation property list. 
+* @param fcpl_id            IN: File creation property list identifier 
+*
+* @return nindexes, the number of shared object header message indexes available in files created with this property list 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pget_shared_mesg_nindexes(int fcpl_id) 
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_shared_mesg_nindexes sets the number of shared object header message indexes in the specified file creation property list. 
+* @param plist_id                IN: File creation property list 
+* @param nindexes                IN: Number of shared object header message indexes to be available in files created with this property list
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Invalid value of nindexes
+*  
+**/
+public synchronized static native int H5Pset_shared_mesg_nindexes(int plist_id, int nindexes)
+        throws HDF5LibraryException, IllegalArgumentException;
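+
+// Usage sketch (illustrative only; `fcpl` and the availability of the
+// HDF5Constants.H5O_SHMESG_DTYPE_FLAG binding are assumptions): reserve one
+// shared-message index and configure it (index numbers start at 0) to share
+// datatype messages of at least 40 bytes:
+//
+//     H5Pset_shared_mesg_nindexes(fcpl, 1);
+//     H5Pset_shared_mesg_index(fcpl, 0, HDF5Constants.H5O_SHMESG_DTYPE_FLAG, 40);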
+
+/**
+* H5Pget_shared_mesg_phase_change retrieves shared object header message phase change information. 
+* @param fcpl_id            IN: File creation property list identifier 
+* @param size
+*               The threshold values for storage of shared object header 
+*               message indexes in a file.
+*
+*      <pre>
+*      size[0] =  Threshold above which storage of a shared object header message index shifts from list to B-tree 
+*      size[1] =  Threshold below which storage of a shared object header message index reverts to list format  
+*      </pre>
+*      
+* @return Returns a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - size is null.
+*  
+**/
+public synchronized static native int H5Pget_shared_mesg_phase_change(int fcpl_id, int[] size) 
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pset_shared_mesg_phase_change sets shared object header message storage phase change thresholds. 
+* @param fcpl_id                IN: File creation property list identifier
+* @param max_list                IN: Threshold above which storage of a shared object header message index shifts from list to B-tree
+* @param min_btree                IN: Threshold below which storage of a shared object header message index reverts to list format
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Invalid values of max_list and min_btree.
+*  
+**/
+public synchronized static native int H5Pset_shared_mesg_phase_change(int fcpl_id, int max_list, int min_btree)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+public synchronized static native long H5Pget_sieve_buf_size(int fapl_id)
+        throws HDF5LibraryException;
+public synchronized static native void H5Pset_sieve_buf_size(int fapl_id, long size) 
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_size retrieves the size of a property's value in bytes
+ * 
+ * @param plid
+ *            IN: Identifier of property object to query
+ * @param name
+ *            IN: Name of property to query
+ * @return size of a property's value if successful; a negative value if
+ *         failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native long H5Pget_size(int plid, String name)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_sizes retrieves the size of the offsets and lengths used in an
+ * HDF5 file. This function is only valid for file creation property lists.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to query.
+ * @param size
+ *            OUT: the sizes of the offsets and lengths.
+ * 
+ *            <pre>
+ *      size[0] = sizeof_addr // offset size in bytes
+ *      size[1] = sizeof_size // length size in bytes
+ * </pre>
+ * @return a non-negative value, with the sizes initialized, if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - size is null.
+ * @exception IllegalArgumentException
+ *                - size is invalid.
+ **/
+public synchronized static native int H5Pget_sizes(int plist, long[] size)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Pset_sizes sets the byte size of the offsets and lengths used to
+ * address objects in an HDF5 file.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to modify.
+ * @param sizeof_addr
+ *            IN: Size of an object offset in bytes.
+ * @param sizeof_size
+ *            IN: Size of an object length in bytes.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_sizes(int plist,
+        int sizeof_addr, int sizeof_size) throws HDF5LibraryException;
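+
+// Usage sketch (illustrative only; `fcpl` is an assumed file creation
+// property list id): use 8-byte object offsets and lengths, then verify:
+//
+//     H5Pset_sizes(fcpl, 8, 8);
+//     long[] sizes = new long[2];
+//     H5Pget_sizes(fcpl, sizes); // sizes[0] == 8, sizes[1] == 8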
+
+/**
+ * H5Pget_small_data_block_size retrieves the size of a block of small data
+ * in a file creation property list.
+ * 
+ * @param plist
+ *            IN: Identifier for property list to query.
+ * @param size
+ *            OUT: Pointer to location to return block size.
+ * 
+ * @return a non-negative value and the block size, if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - size is null.
+ **/
+public synchronized static native int H5Pget_small_data_block_size(
+        int plist, long[] size)
+        throws HDF5LibraryException, NullPointerException;
+public synchronized static native long H5Pget_small_data_block_size_long(
+        int plist)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pset_small_data_block_size reserves blocks of size bytes for the
+ * contiguous storage of the raw data portion of small datasets.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to modify.
+ * @param size
+ *            IN: Size of the blocks in bytes.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_small_data_block_size(
+        int plist, long size) throws HDF5LibraryException;
+
+/**
+ * H5Pget_sym_k retrieves the size of the symbol table B-tree 1/2 rank and
+ * the symbol table leaf node 1/2 size.
+ * 
+ * @param plist
+ *            IN: Property list to query.
+ * @param size
+ *            OUT: the symbol table's B-tree 1/2 rank and leaf node 1/2 size.
+ * 
+ *            <pre>
+ *      size[0] = ik // the symbol table's B-tree 1/2 rank
+ *      size[1] = lk // leaf node 1/2 size
+ * </pre>
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - size is null.
+ * @exception IllegalArgumentException
+ *                - size is invalid.
+ **/
+public synchronized static native int H5Pget_sym_k(int plist, int[] size)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+/**
+ * H5Pset_sym_k sets the size of parameters used to control the symbol table
+ * nodes.
+ * 
+ * @param plist
+ *            IN: Identifier for property list to query.
+ * @param ik
+ *            IN: Symbol table tree rank.
+ * @param lk
+ *            IN: Symbol table node size.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_sym_k(int plist, int ik, int lk)
+        throws HDF5LibraryException;
+
+/**
+ * H5Pget_userblock retrieves the size of a user block in a file creation
+ * property list.
+ * 
+ * @param plist
+ *            IN: Identifier for property list to query.
+ * @param size
+ *            OUT: Pointer to location to return user-block size.
+ * 
+ * @return a non-negative value and the size of the user block, if
+ *         successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - size is null.
+ **/
+public synchronized static native int H5Pget_userblock(int plist,
+        long[] size) throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Pset_userblock sets the user block size of a file creation property
+ * list.
+ * 
+ * @param plist
+ *            IN: Identifier of property list to modify.
+ * @param size
+ *            IN: Size of the user-block in bytes.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_userblock(int plist, long size)
+        throws HDF5LibraryException;
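+
+// Usage sketch (illustrative only; `fcpl` is an assumed file creation
+// property list id): reserve a 512-byte user block; the size must be 0 or a
+// power of two of at least 512:
+//
+//     H5Pset_userblock(fcpl, 512);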
+
+/**
+ * H5Pget_version retrieves the version information of various objects for a
+ * file creation property list.
+ * 
+ * @param plist
+ *            IN: Identifier of the file creation property list.
+ * @param version_info
+ *            OUT: version information.
+ * 
+ *            <pre>
+ *      version_info[0] = boot  // boot block version number
+ *      version_info[1] = freelist  // global freelist version
+ *      version_info[2] = stab  // symbol table version number
+ *      version_info[3] = shhdr  // shared object header version
+ * </pre>
+ * @return a non-negative value, with the values of version_info
+ *         initialized, if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - version_info is null.
+ * @exception IllegalArgumentException
+ *                - version_info is illegal.
+ **/
+public synchronized static native int H5Pget_version(int plist, int[] version_info)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+/**
+ * H5Pisa_class checks to determine whether a property list is a member of
+ * the specified class
+ * 
+ * @param plist
+ *            IN: Identifier of the property list
+ * @param pclass
+ *            IN: Identifier of the property class
+ * @return a positive value if equal; zero if unequal; a negative value if
+ *         failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Pisa_class(int plist, int pclass)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Pmodify_filter(int plist,
+        int filter, int flags, long cd_nelmts, int[] cd_values)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Punregister removes a property from a property list class
+ * 
+ * @param plid
+ *            IN: Property list class from which to remove permanent
+ *            property
+ * @param name
+ *            IN: Name of property to remove
+ * @return a non-negative value if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Punregister(int plid, String name)
+        throws HDF5LibraryException;
+
+/**
+ * H5Premove removes a property from a property list
+ * 
+ * @param plid
+ *            IN: Identifier of the property list to modify
+ * @param name
+ *            IN: Name of property to remove
+ * @return a non-negative value if successful; a negative value if failed
+ * @throws HDF5LibraryException
+ */
+public synchronized static native int H5Premove(int plid, String name)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Premove_filter(int obj_id,
+        int filter) throws HDF5LibraryException;
+
+/**
+ * H5Pset_deflate sets the compression method for a dataset.
+ * 
+ * @param plist
+ *            IN: Identifier for the dataset creation property list.
+ * @param level
+ *            IN: Compression level.
+ * 
+ * @return non-negative if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Pset_deflate(int plist, int level)
+        throws HDF5LibraryException;
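+
+// Usage sketch (illustrative only; `dcpl` is an assumed dataset creation
+// property list id, and the long[] overload of H5Pset_chunk declared
+// elsewhere in this class is assumed): deflate requires a chunked layout, so
+// set a chunk shape first, then compress at level 6:
+//
+//     H5Pset_chunk(dcpl, 2, new long[] { 64, 64 });
+//     H5Pset_deflate(dcpl, 6);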
+
+/**
+ *  H5Pset_fapl_log Sets up the logging virtual file driver (H5FD_LOG) for use.
+ *  H5Pset_fapl_log modifies the file access property list to use the logging driver, H5FD_LOG. 
+ *  The logging virtual file driver (VFD) is a clone of the standard SEC2 (H5FD_SEC2) driver 
+ *  with additional facilities for logging VFD metrics and activity to a file. 
+ *
+ *  @deprecated As of HDF5 1.8.7, replaced by {@link #H5Pset_fapl_log(int, String, long, long)}
+ *
+ *  @param fapl_id  IN: File access property list identifier. 
+ *  @param logfile  IN: logfile is the name of the file in which the logging entries are to be recorded.
+ *  @param flags    IN: Flags specifying the types of logging activity.
+ *  @param buf_size IN: The size of the logging buffers, in bytes.
+ *
+ *  @return a non-negative value if successful
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - logfile is null.
+ **/
+@Deprecated
+public static int H5Pset_fapl_log(int fapl_id,
+        String logfile, int flags, int buf_size)
+        throws HDF5LibraryException, NullPointerException
+{
+    H5Pset_fapl_log(fapl_id, logfile, (long)flags, (long)buf_size);
+    return 1;
+}
+
+/**
+ *  H5Pset_fapl_log Sets up the logging virtual file driver (H5FD_LOG) for use.
+ *  H5Pset_fapl_log modifies the file access property list to use the logging driver, H5FD_LOG. 
+ *  The logging virtual file driver (VFD) is a clone of the standard SEC2 (H5FD_SEC2) driver 
+ *  with additional facilities for logging VFD metrics and activity to a file. 
+ *
+ *  @param fapl_id  IN: File access property list identifier. 
+ *  @param logfile  IN: logfile is the name of the file in which the logging entries are to be recorded.
+ *  @param flags    IN: Flags specifying the types of logging activity.
+ *  @param buf_size IN: The size of the logging buffers, in bytes.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - logfile is null.
+ **/
+public synchronized static native void H5Pset_fapl_log(int fapl_id,
+        String logfile, long flags, long buf_size)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fapl_sec2(int fapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native void H5Pset_fapl_split(int fapl_id, 
+        String meta_ext, int meta_plist_id, String raw_ext, int raw_plist_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fapl_stdio(int fapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fapl_windows(int fapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_fletcher32(int plist)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+* H5Pset_nbit Sets up the use of the N-Bit filter.  
+* @param plist_id                IN: Dataset creation property list identifier.
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  
+**/
+public synchronized static native int H5Pset_nbit(int plist_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Pset_scaleoffset sets the Scale-Offset filter for a dataset.   
+* @param plist_id                IN: Dataset creation property list identifier.
+* @param scale_type            IN: Flag indicating compression method.
+* @param scale_factor            IN: Parameter related to scale.
+*  
+* @return a non-negative value if successful; otherwise returns a negative value.
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception IllegalArgumentException - Invalid arguments
+*  
+**/
+public synchronized static native int H5Pset_scaleoffset(int plist_id, int scale_type, int scale_factor)
+        throws HDF5LibraryException, IllegalArgumentException;
+
+public synchronized static native int H5Pset_shuffle(int plist_id)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Pset_szip(int plist,
+        int options_mask, int pixels_per_block)
+        throws HDF5LibraryException, NullPointerException;
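+
+// Usage sketch (illustrative only; `dcpl` is an assumed dataset creation
+// property list id): enable SZIP with nearest-neighbor coding and 16 pixels
+// per block; requires a chunked layout and an SZIP-enabled native library:
+//
+//     H5Pset_szip(dcpl, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 16);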
+
+///////// unimplemented ////////
+
+//herr_t H5Padd_merge_committed_dtype_path(hid_t plist_id, const char *path);
+
+//hid_t H5Pcreate_class( hid_t parent_class, const char *name, H5P_cls_create_func_t create, void *create_data, H5P_cls_copy_func_t copy, void *copy_data, H5P_cls_close_func_t close, void *close_data ) 
+
+//herr_t H5Pfree_merge_committed_dtype_paths(hid_t plist_id);
+
+//void *H5Pget_driver_info( hid_t plist_id ) 
+
+//herr_t H5Pget_elink_cb( hid_t lapl_id, H5L_elink_traverse_t *func, void **op_data ) 
+//herr_t H5Pset_elink_cb( hid_t lapl_id, H5L_elink_traverse_t func, void *op_data ) 
+
+//herr_t H5Pget_file_image(hid_t fapl_id, void **buf_ptr_ptr, size_t *buf_len_ptr);
+//herr_t H5Pset_file_image(hid_t fapl_id, void *buf_ptr, size_t buf_len);
+
+//herr_t H5Pget_file_image_callbacks(hid_t fapl_id,
+//       H5FD_file_image_callbacks_t *callbacks_ptr);
+//herr_t H5Pset_file_image_callbacks(hid_t fapl_id,
+//       H5FD_file_image_callbacks_t *callbacks_ptr);
+
+//herr_t H5Pget_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t *func, void **op_data);
+//herr_t H5Pset_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t func, void *op_data);
+
+//herr_t H5Pget_multi_type ( hid_t fapl_id, H5FD_mem_t *type ) 
+//herr_t H5Pset_multi_type ( hid_t fapl_id, H5FD_mem_t type ) 
+
+//herr_t H5Pget_type_conv_cb(hid_t plist, H5T_conv_except_func_t *func, void **op_data) 
+//herr_t H5Pset_type_conv_cb( hid_t plist, H5T_conv_except_func_t func, void *op_data) 
+
+//herr_t H5Pget_vlen_mem_manager(hid_t plist, H5MM_allocate_t *alloc, void **alloc_info, H5MM_free_t *free, void **free_info ) 
+//herr_t H5Pset_vlen_mem_manager(hid_t plist, H5MM_allocate_t alloc, void *alloc_info, H5MM_free_t free, void *free_info ) 
+
+//herr_t H5Pinsert( hid_t plid, const char *name, size_t size, void *value, H5P_prp_set_func_t set, H5P_prp_get_func_t get, H5P_prp_delete_func_t delete, H5P_prp_copy_func_t copy, H5P_prp_compare_func_t compare, H5P_prp_close_func_t close )] 
+//herr_t H5Pinsert2( hid_t plid, const char *name, size_t size, void *value, H5P_prp_set_func_t set, H5P_prp_get_func_t get, H5P_prp_delete_func_t delete, H5P_prp_copy_func_t copy, H5P_prp_compare_func_t compare, H5P_prp_close_func_t close ) 
+
+//int H5Piterate( hid_t id, int * idx, H5P_iterate_t iter_func, void * iter_data ) 
+
+//herr_t H5Pregister( hid_t class, const char * name, size_t size, void * default, H5P_prp_create_func_t create, H5P_prp_set_func_t set, H5P_prp_get_func_t get, H5P_prp_delete_func_t delete, H5P_prp_copy_func_t copy, H5P_prp_compare_func_t compare, H5P_prp_close_func_t close )   
+//herr_t H5Pregister2( hid_t class, const char * name, size_t size, void * default, H5P_prp_create_func_t create, H5P_prp_set_func_t set, H5P_prp_get_func_t get, H5P_prp_delete_func_t delete, H5P_prp_copy_func_t copy, H5P_prp_compare_func_t compare, H5P_prp_close_func_t close )
+
+//herr_t H5Pset_attr_phase_change( hid_t ocpl_id, unsigned max_compact, unsigned min_dense ) 
+
+//herr_t H5Pset_buffer(hid_t plist_id, size_t size, void *tconv, void *bkg);
+
+//herr_t H5Pset_driver( hid_t plist_id, hid_t new_driver_id, const void *new_driver_info ) 
+
+//herr_t H5Pset_filter_callback(hid_t plist, H5Z_filter_func_t func, void *op_data) 
+
+//herr_t H5Pget_dxpl_mpio( hid_t dxpl_id, H5FD_mpio_xfer_t *xfer_mode ) 
+//herr_t H5Pset_dxpl_mpio( hid_t dxpl_id, H5FD_mpio_xfer_t xfer_mode ) 
+//herr_t H5Pset_dxpl_mpio_chunk_opt (hid_t dxpl_id, H5FD_mpio_chunk_opt_t opt_mode) 
+//herr_t H5Pset_dxpl_mpio_chunk_opt_num (hid_t dxpl_id, unsigned num_chunk_per_proc) 
+//herr_t H5Pset_dxpl_mpio_chunk_opt_ratio (hid_t dxpl_id, unsigned percent_proc_per_chunk) 
+//herr_t H5Pset_dxpl_mpio_collective_opt (hid_t dxpl_id, H5FD_mpio_collective_opt_t opt_mode) 
+
+
+//////////////////////////////////////////////////////////////
+////
+//H5R: HDF5 1.8 Reference API Functions                     //
+////
+//////////////////////////////////////////////////////////////
+
+private synchronized static native int H5Rcreate(byte[] ref, int loc_id,
+        String name, int ref_type, int space_id)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+/**
+* H5Rcreate creates the reference, ref, of the type specified in ref_type,
+* pointing to the object name located at loc_id.
+* 
+* @param loc_id
+*            IN: Location identifier used to locate the object being
+*            pointed to.
+* @param name
+*            IN: Name of object at location loc_id.
+* @param ref_type
+*            IN: Type of reference.
+* @param space_id
+*            IN: Dataspace identifier with selection.
+* 
+* @return the reference (byte[]) if successful
+* 
+* @exception HDF5LibraryException
+*                - Error from the HDF-5 Library.
+* @exception NullPointerException
+*                - an input array is null.
+* @exception IllegalArgumentException
+*                - an input array is invalid.
+**/
+public synchronized static byte[] H5Rcreate(int loc_id, String name,
+        int ref_type, int space_id)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+{
+    /* These sizes are correct for HDF5.1.2 */
+    int ref_size = 8;
+    if (ref_type == HDF5Constants.H5R_DATASET_REGION) {
+        ref_size = 12;
+    }
+    byte rbuf[] = new byte[ref_size];
+
+    /* will raise an exception if fails */
+    H5Rcreate(rbuf, loc_id, name, ref_type, space_id);
+
+    return rbuf;
+}
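+
+// Usage sketch (illustrative only; `file_id` and the dataset path "/dset"
+// are assumptions): create an object reference and open the object again
+// through it; -1 is passed for space_id because no dataspace selection
+// applies to object references:
+//
+//     byte[] ref = H5Rcreate(file_id, "/dset", HDF5Constants.H5R_OBJECT, -1);
+//     int obj_id = H5Rdereference(file_id, HDF5Constants.H5R_OBJECT, ref);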
+
+/**
+ * Given a reference to some object, H5Rdereference opens that object and
+ * returns an identifier.
+ * 
+ * @param dataset
+ *            IN: Dataset containing reference object.
+ * @param ref_type
+ *            IN: The reference type of ref.
+ * @param ref
+ *            IN: reference to an object
+ * 
+ * @return valid identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - output array is null.
+ * @exception IllegalArgumentException
+ *                - output array is invalid.
+ **/
+public static int H5Rdereference(int dataset, int ref_type, byte[] ref)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    int id = _H5Rdereference(dataset, ref_type, ref);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Rdereference(int dataset,
+        int ref_type, byte[] ref)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+* H5Rget_name retrieves a name for the object identified by ref.
+* @param loc_id     IN: Identifier for the dataset containing the reference or for the group that dataset is in.
+* @param ref_type         IN: Type of reference.
+* @param ref     IN: An object or dataset region reference.
+* @param name     OUT: A name associated with the referenced object or dataset region.
+* @param size     IN: The size of the name buffer.
+* 
+* @return Returns the length of the name if successful, returning 0 (zero) if no name is associated with the identifier. Otherwise returns a negative value. 
+* 
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - size is null.
+* @exception IllegalArgumentException - Argument is illegal.
+*  
+**/
+public synchronized static native long H5Rget_name( int loc_id, int ref_type, byte[] ref, String[] name, long size)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+/*
+ * [NOTE: H5Rget_object_type is only supported in HDF5 Releases 1.4.x. It has
+ * been replaced in Release 1.6 by the function H5Rget_obj_type.]
+ *
+ * public synchronized static native int H5Rget_object_type(int loc_id, byte ref[])
+ *         throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+ */
+/**
+ * Given a reference to an object ref, H5Rget_obj_type returns the type of
+ * the object pointed to.
+ * 
+ * @deprecated As of HDF5 1.8, replaced by {@link #H5Rget_obj_type(int, int, byte[], int[]) }
+ * 
+ * @param loc_id
+ *            IN: loc_id of the reference object.
+ * @param ref_type
+*            IN: Type of reference to query.
+ * @param ref
+ *            IN: the reference
+ * 
+ * @return a valid object type if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - array is null.
+ * @exception IllegalArgumentException
+ *                - array is invalid.
+ **/
+@Deprecated
+public synchronized static native int H5Rget_obj_type(int loc_id,
+        int ref_type, byte ref[])
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ * H5Rget_obj_type Given a reference to an object ref, H5Rget_obj_type returns the type of
+ * the object pointed to.
+ * 
+ * @param loc_id        IN: loc_id of the reference object.
+ * @param ref_type        IN: Type of reference to query. 
+ * @param ref            IN: the reference
+ * @param obj_type        OUT: Type of referenced object
+ * 
+ * @return Returns the object type, which is the same as obj_type[0]. The return value is
+ *         the same as the HDF5 1.6 version. 
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - array is null.
+ * @exception IllegalArgumentException
+ *                - array is invalid.
+ **/
+public static int H5Rget_obj_type(int loc_id,
+        int ref_type, byte ref[], int [] obj_type)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    return H5Rget_obj_type2(loc_id, ref_type, ref, obj_type);
+}
+
+/**
+ * H5Rget_obj_type2 Retrieves the type of object that an object reference points to. 
+ * 
+ * @see #H5Rget_obj_type(int, int, byte[], int[])
+ **/
+private synchronized static native int H5Rget_obj_type2(int loc_id,
+        int ref_type, byte ref[], int [] obj_type)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+/**
+ * Given a reference to an object ref, H5Rget_region creates a copy of the
+ * dataspace of the dataset pointed to and defines a selection in the copy
+ * which is the region pointed to.
+ * 
+ * @param loc_id
+ *            IN: loc_id of the reference object.
+ * @param ref_type
+ *            IN: The reference type of ref.
+ * @param ref
+ *            OUT: the reference to the object and region
+ * 
+ * @return a valid identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - output array is null.
+ * @exception IllegalArgumentException
+ *                - output array is invalid.
+ **/
+public static int H5Rget_region(int loc_id, int ref_type, byte[] ref)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    int id = _H5Rget_region(loc_id, ref_type, ref);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Rget_region(int loc_id,
+        int ref_type, byte[] ref)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+//////////////////////////////////////////////////////////////
+//                                                          //
+//H5S: Dataspace Interface Functions                        //
+//                                                          //
+//////////////////////////////////////////////////////////////
+
+/**
+ * H5Sclose releases a dataspace.
+ * 
+ * @param space_id
+ *            Identifier of dataspace to release.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Sclose(int space_id) throws HDF5LibraryException
+{
+    if (space_id < 0)
+        return 0; // throw new HDF5LibraryException("Negative ID");
+    
+    OPEN_IDS.removeElement(space_id);
+    return _H5Sclose(space_id);
+}
+
+private synchronized static native int _H5Sclose(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Scopy creates a new dataspace which is an exact copy of the dataspace
+ * identified by space_id.
+ * 
+ * @param space_id
+ *            Identifier of dataspace to copy.
+ * @return a dataspace identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Scopy(int space_id) throws HDF5LibraryException
+{
+    int id = _H5Scopy(space_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Scopy(int space_id)
+        throws HDF5LibraryException;
+
+/**
+*  H5Screate creates a new dataspace of a particular type.
+*
+*  @param type IN: The type of dataspace to be created.
+*
+*  @return a dataspace identifier
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+**/
+public static int H5Screate(int type) throws HDF5LibraryException
+{
+    int id = _H5Screate(type);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Screate(int type)
+     throws HDF5LibraryException;
+
+/**
+ *  H5Screate_simple creates a new simple data space and opens
+ *  it for access.
+ *
+ *  @param rank    IN: Number of dimensions of dataspace.
+ *  @param dims    IN: An array of the size of each dimension.
+ *  @param maxdims IN: An array of the maximum size of each dimension.
+ *
+ *  @return a dataspace identifier
+ *
+ *  @exception HDF5Exception - Error from the HDF-5 Library.
+ *  @exception NullPointerException - dims or maxdims is null.
+ **/
+public static int H5Screate_simple(int rank, long[] dims, long[] maxdims)
+throws HDF5Exception, NullPointerException
+{
+    int id = _H5Screate_simple(rank, dims, maxdims);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Screate_simple(int rank, long[] dims,
+        long[] maxdims) throws HDF5Exception, NullPointerException;
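+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): creates a 2-D dataspace of 100x200 elements whose first dimension
+// is marked extendible via H5S_UNLIMITED. The caller must eventually close
+// the returned identifier with H5Sclose.
+private static int exampleCreateExtensibleSpace()
+        throws HDF5Exception, NullPointerException
+{
+    long[] dims    = { 100, 200 };
+    long[] maxdims = { HDF5Constants.H5S_UNLIMITED, 200 };
+    return H5Screate_simple(2, dims, maxdims);
+}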
+
+/**
+ *  @deprecated use H5Screate_simple(int rank, long[] dims, long[] maxdims)
+ **/
+@Deprecated
+public static int H5Screate_simple(int rank, byte[] dims, byte[] maxdims)
+        throws HDF5Exception, NullPointerException
+{
+    ByteBuffer dimsbb = ByteBuffer.wrap(dims);
+    long[] ladims = (dimsbb.asLongBuffer()).array();
+    ByteBuffer maxdimsbb = ByteBuffer.wrap(maxdims);
+    long[] lamaxdims = (maxdimsbb.asLongBuffer()).array();
+
+    int id = _H5Screate_simple(rank, ladims, lamaxdims);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+/**
+ *  H5Sdecode reconstructs the HDF5 data space object and returns a 
+ *  new object handle for it.
+ *
+ *  @param buf   IN: Buffer for the data space object to be decoded.
+ *
+ *  @return a new object handle
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native int H5Sdecode(byte[] buf)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Sencode converts a data space description into binary form in a buffer.
+ *
+ *  @param obj_id   IN: Identifier of the object to be encoded.
+ *
+ *  @return the buffer for the object to be encoded into.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native byte[] H5Sencode(int obj_id)
+throws HDF5LibraryException, NullPointerException;
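+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): round-trips a dataspace through its binary description. The decoded
+// identifier describes the same extent and selection as the original and
+// must be closed separately.
+private static int exampleCloneViaEncode(int space_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    byte[] buf = H5Sencode(space_id);
+    return H5Sdecode(buf);
+}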
+
+/**
+ * H5Sextent_copy copies the extent from source_space_id to dest_space_id.
+ * This action may change the type of the dataspace.
+ * 
+ * @param dest_space_id
+ *            IN: The identifier for the dataspace from which the extent is
+ *            copied.
+ * @param source_space_id
+ *            IN: The identifier for the dataspace to which the extent is
+ *            copied.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sextent_copy(int dest_space_id,
+        int source_space_id) throws HDF5LibraryException;
+
+/**
+ * H5Sextent_equal determines whether the dataspace extents of two dataspaces, 
+ * space1_id and space2_id, are equal. 
+ * 
+ * @param first_space_id
+ *            IN: The identifier for the first dataspace.
+ * @param second_space_id
+ *            IN: The identifier for the second dataspace.
+ * 
+ * @return true if successful, else false
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native boolean H5Sextent_equal(int first_space_id,
+        int second_space_id) throws HDF5LibraryException;
+
+/**
+ * H5Sget_select_bounds retrieves the coordinates of the bounding box
+ * containing the current selection and places them into user-supplied
+ * buffers.
+ * <P>
+ * The start and end buffers must each be large enough to hold as many
+ * coordinates as the rank of the dataspace.
+ * 
+ * @param spaceid
+ *            Identifier of dataspace to query.
+ * @param start
+ *            coordinates of lowest corner of bounding box.
+ * @param end
+ *            coordinates of highest corner of bounding box.
+ * 
+ * @return a non-negative value if successful, with start and end
+ *         initialized.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - start or end is null.
+ **/
+public synchronized static native int H5Sget_select_bounds(int spaceid,
+        long[] start, long[] end)
+        throws HDF5LibraryException, NullPointerException;
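+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): retrieves the bounding box of the current selection. The start and
+// end arrays are sized to the dataspace rank, as required above.
+private static void examplePrintSelectionBounds(int space_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int rank = H5Sget_simple_extent_ndims(space_id);
+    long[] start = new long[rank];
+    long[] end = new long[rank];
+    H5Sget_select_bounds(space_id, start, end);
+    System.out.println("selection bounds: "
+            + java.util.Arrays.toString(start) + " .. "
+            + java.util.Arrays.toString(end));
+}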
+
+/**
+ * H5Sget_select_elem_npoints returns the number of element points in the
+ * current dataspace selection.
+ * 
+ * @param spaceid
+ *            Identifier of dataspace to query.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Sget_select_elem_npoints(
+        int spaceid) throws HDF5LibraryException;
+
+/**
+ * H5Sget_select_elem_pointlist returns an array of element points in the
+ * current dataspace selection. The point coordinates have the same
+ * dimensionality (rank) as the dataspace they are located within, one
+ * coordinate per point.
+ * 
+ * @param spaceid
+ *            Identifier of dataspace to query.
+ * @param startpoint
+ *            first point to retrieve
+ * @param numpoints
+ *            number of points to retrieve
+ * @param buf
+ *            returns points startpoint to startpoint+numpoints-1; each
+ *            point is <i>rank</i> longs.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - buf is null.
+ **/
+public synchronized static native int H5Sget_select_elem_pointlist(
+        int spaceid, long startpoint, long numpoints, long[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Sget_select_hyper_blocklist returns an array of hyperslab blocks. The
+ * block coordinates have the same dimensionality (rank) as the dataspace
+ * they are located within. The list of blocks is formatted as follows:
+ * 
+ * <pre>
+ *    <"start" coordinate>, immediately followed by
+ *    <"opposite" corner coordinate>, followed by
+ *   the next "start" and "opposite" coordinates,
+ *   etc.
+ *   until all of the selected blocks have been listed.
+ * </pre>
+ * 
+ * @param spaceid
+ *            Identifier of dataspace to query.
+ * @param startblock
+ *            first block to retrieve
+ * @param numblocks
+ *            number of blocks to retrieve
+ * @param buf
+ *            returns blocks startblock to startblock+numblocks-1; each
+ *            block is <i>rank</i> * 2 (corners) longs.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - buf is null.
+ **/
+public synchronized static native int H5Sget_select_hyper_blocklist(
+        int spaceid, long startblock, long numblocks, long[] buf)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Sget_select_hyper_nblocks returns the number of hyperslab blocks in the
+ * current dataspace selection.
+ * 
+ * @param spaceid
+ *            Identifier of dataspace to query.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Sget_select_hyper_nblocks(
+        int spaceid) throws HDF5LibraryException;
+
+/**
+ * H5Sget_select_npoints determines the number of elements in the current
+ * selection of a dataspace.
+ * 
+ * @param space_id IN: Identifier of the dataspace object to query
+ * 
+ * @return the number of elements in the selection if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Sget_select_npoints(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Sget_select_type retrieves the type of selection currently defined for the dataspace space_id.
+ * 
+ * @param space_id IN: Identifier of the dataspace object to query
+ * 
+ * @return the dataspace selection type if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sget_select_type(int space_id) 
+        throws HDF5LibraryException;
+
+/**
+ * H5Sget_simple_extent_dims returns the size and maximum sizes of each
+ * dimension of a dataspace through the dims and maxdims parameters.
+ * 
+ * @param space_id IN: Identifier of the dataspace object to query
+ * @param dims    OUT: Pointer to array to store the size of each dimension.
+ * @param maxdims OUT: Pointer to array to store the maximum size of each
+ *                     dimension.
+ * 
+ * @return the number of dimensions in the dataspace if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - dims or maxdims is null.
+ **/
+public synchronized static native int H5Sget_simple_extent_dims(
+        int space_id, long[] dims, long[] maxdims)
+        throws HDF5LibraryException, NullPointerException;
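+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): queries the rank first so that dims and maxdims can be allocated
+// with the correct length before calling H5Sget_simple_extent_dims.
+private static long[] exampleGetDims(int space_id)
+        throws HDF5LibraryException, NullPointerException
+{
+    int rank = H5Sget_simple_extent_ndims(space_id);
+    long[] dims = new long[rank];
+    long[] maxdims = new long[rank];
+    H5Sget_simple_extent_dims(space_id, dims, maxdims);
+    return dims;
+}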
+
+/**
+ * H5Sget_simple_extent_ndims determines the dimensionality (or rank) of a
+ * dataspace.
+ * 
+ * @param space_id IN: Identifier of the dataspace
+ * 
+ * @return the number of dimensions in the dataspace if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sget_simple_extent_ndims(
+        int space_id) throws HDF5LibraryException;
+
+/**
+ * H5Sget_simple_extent_npoints determines the number of elements in a
+ * dataspace.
+ * 
+ * @param space_id
+ *            ID of the dataspace object to query
+ * @return the number of elements in the dataspace if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Sget_simple_extent_npoints(
+        int space_id) throws HDF5LibraryException;
+
+/**
+ * H5Sget_simple_extent_type queries a dataspace to determine the current
+ * class of a dataspace.
+ * 
+ * @param space_id
+ *            Dataspace identifier.
+ * 
+ * @return a dataspace class name if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sget_simple_extent_type(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Sis_simple determines whether a dataspace is a simple dataspace.
+ * 
+ * @param space_id
+ *            Identifier of the dataspace to query
+ * 
+ * @return true if is a simple dataspace
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native boolean H5Sis_simple(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Soffset_simple sets the offset of a simple dataspace space_id.
+ * 
+ * @param space_id
+ *            IN: The identifier for the dataspace object to reset.
+ * @param offset
+ *            IN: The offset at which to position the selection.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - offset array is null.
+ **/
+public synchronized static native int H5Soffset_simple(int space_id,
+        byte[] offset) throws HDF5LibraryException, NullPointerException;
+
+public synchronized static int H5Soffset_simple(int space_id, long[] offset)
+        throws HDF5Exception, NullPointerException
+{
+    if (offset == null) {
+        return -1;
+    }
+
+    HDFArray theArray = new HDFArray(offset);
+    byte[] theArr = theArray.byteify();
+
+    int retVal = H5Soffset_simple(space_id, theArr);
+
+    theArr = null;
+    theArray = null;
+    return retVal;
+}
+
+/**
+ * H5Sselect_all selects the entire extent of the dataspace space_id.
+ * 
+ * @param space_id
+ *            IN: The identifier of the dataspace to be selected.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sselect_all(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Sselect_elements selects array elements to be included in the selection
+ * for the space_id dataspace.
+ * 
+ * @param space_id
+ *            Identifier of the dataspace.
+ * @param op
+ *            operator specifying how the new selection is combined.
+ * @param num_elements
+ *            Number of elements to be selected.
+ * @param coord
+ *            A 2-dimensional array specifying the coordinates of the
+ *            elements.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+private synchronized static native int H5Sselect_elements(int space_id,
+        int op, int num_elements, byte[] coord)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Sselect_elements selects array elements to be included in the selection
+ * for the space_id dataspace.
+ * 
+ * @param space_id
+ *            Identifier of the dataspace.
+ * @param op
+ *            operator specifying how the new selection is combined.
+ * @param num_elements
+ *            Number of elements to be selected.
+ * @param coord2D
+ *            A 2-dimensional array specifying the coordinates of the
+ *            elements.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5Exception
+ *                - Error in the data conversion
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - coord array is null.
+ **/
+public synchronized static int H5Sselect_elements(int space_id, int op,
+        int num_elements, long[][] coord2D)
+        throws HDF5Exception, HDF5LibraryException, NullPointerException
+{
+    if (coord2D == null) {
+        return -1;
+    }
+
+    HDFArray theArray = new HDFArray(coord2D);
+    byte[] coord = theArray.byteify();
+
+    int retVal = H5Sselect_elements(space_id, op, num_elements, coord);
+
+    coord = null;
+    theArray = null;
+    return retVal;
+}
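+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): selects three individual elements of a 2-D dataspace, replacing any
+// current selection. Each row of coord2D holds one coordinate per dimension.
+private static void exampleSelectThreeElements(int space_id)
+        throws HDF5Exception, NullPointerException
+{
+    long[][] coord2D = { { 0, 0 }, { 2, 3 }, { 4, 5 } };
+    H5Sselect_elements(space_id, HDF5Constants.H5S_SELECT_SET, 3, coord2D);
+}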
+
+///**
+//*  H5Sselect_hyperslab selects a hyperslab region to add to
+//*  the current selected region for the dataspace specified
+//*  by space_id.  The start, stride, count, and block arrays
+//*  must be the same size as the rank of the dataspace.
+//*
+//*  @param space_id IN: Identifier of dataspace selection to modify
+//*  @param op       IN: Operation to perform on current selection.
+//*  @param start    IN: Offset of start of hyperslab
+//*  @param count    IN: Number of blocks included in hyperslab.
+//*  @param stride   IN: Hyperslab stride.
+//*  @param block    IN: Size of block in hyperslab.
+//*
+//*  @return none
+//*
+//*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+//*  @exception NullPointerException - an input array is null.
+//*  @exception IllegalArgumentException - an input array is invalid.
+//**/
+//public synchronized static native void H5Sselect_hyperslab(int space_id, H5S_SELECT_OPER op,
+//    long start[], long _stride[], long count[], long _block[])
+//  throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+//public synchronized static native int H5Scombine_hyperslab(int space_id, H5S_SELECT_OPER op,
+//    const long start[], const long _stride[],
+//    const long count[], const long _block[])
+//  throws HDF5LibraryException, NullPointerException;
+//public synchronized static native int H5Sselect_select(int space1_id, H5S_SELECT_OPER op,
+//    int space2_id)
+//  throws HDF5LibraryException, NullPointerException;
+//public synchronized static native int H5Scombine_select(int space1_id, H5S_SELECT_OPER op,
+//    int space2_id)
+//  throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Sselect_hyperslab selects a hyperslab region to add to the current
+ * selected region for the dataspace specified by space_id. The start,
+ * stride, count, and block arrays must be the same size as the rank of the
+ * dataspace.
+ * 
+ * @param space_id
+ *            IN: Identifier of dataspace selection to modify
+ * @param op
+ *            IN: Operation to perform on current selection.
+ * @param start
+ *            IN: Offset of start of hyperslab
+ * @param stride
+ *            IN: Hyperslab stride.
+ * @param count
+ *            IN: Number of blocks included in hyperslab.
+ * @param block
+ *            IN: Size of block in hyperslab.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ *                - an input array is null.
+ * @exception IllegalArgumentException
+ *                - an input array is invalid.
+ **/
+public synchronized static int H5Sselect_hyperslab(int space_id,
+        int op, byte[] start, byte[] stride, byte[] count, byte[] block)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    ByteBuffer startbb = ByteBuffer.wrap(start);
+    long[] lastart = (startbb.asLongBuffer()).array();
+    ByteBuffer stridebb = ByteBuffer.wrap(stride);
+    long[] lastride = (stridebb.asLongBuffer()).array();
+    ByteBuffer countbb = ByteBuffer.wrap(count);
+    long[] lacount = (countbb.asLongBuffer()).array();
+    ByteBuffer blockbb = ByteBuffer.wrap(block);
+    long[] lablock = (blockbb.asLongBuffer()).array();
+
+    return H5Sselect_hyperslab(space_id, op, lastart, lastride, lacount, lablock);
+}
+
+public synchronized static native int H5Sselect_hyperslab(int space_id, int op,
+        long[] start, long[] stride, long[] count, long[] block)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
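+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): selects a 10x10 hyperslab starting at offset (5, 5) in a 2-D
+// dataspace, replacing any current selection. Unit stride and block are
+// used, so the selection is a single contiguous region.
+private static void exampleSelectHyperslab(int space_id)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    long[] start  = { 5, 5 };
+    long[] stride = { 1, 1 };
+    long[] count  = { 10, 10 };
+    long[] block  = { 1, 1 };
+    H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_SET,
+            start, stride, count, block);
+}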
+
+/**
+ * H5Sselect_none resets the selection region for the dataspace space_id to
+ * include no elements.
+ * 
+ * @param space_id
+ *            IN: The identifier of the dataspace to be reset.
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sselect_none(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Sselect_valid verifies that the selection is within the extent of the
+ * dataspace.
+ * 
+ * @param space_id
+ *            The identifier for the dataspace whose selection is being
+ *            verified.
+ * 
+ * @return true if the selection is contained within the extent; false if
+ *         it is not.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native boolean H5Sselect_valid(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Sset_extent_none removes the extent from a dataspace and sets the type
+ * to H5S_NONE.
+ * 
+ * @param space_id
+ *            The identifier for the dataspace from which the extent is to
+ *            be removed.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sset_extent_none(int space_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Sset_extent_simple sets or resets the size of an existing dataspace.
+ * 
+ * @param space_id
+ *            Dataspace identifier.
+ * @param rank
+ *            Rank, or dimensionality, of the dataspace.
+ * @param current_size
+ *            Array containing current size of dataspace.
+ * @param maximum_size
+ *            Array containing maximum size of dataspace.
+ * 
+ * @return a dataspace identifier if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Sset_extent_simple(int space_id,
+        int rank, long[] current_size, long[] maximum_size)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static  int H5Sset_extent_simple(int space_id,
+        int rank, byte[] current_size, byte[] maximum_size)
+        throws HDF5LibraryException, NullPointerException
+{
+    ByteBuffer csbb = ByteBuffer.wrap(current_size);
+    long[] lacs = (csbb.asLongBuffer()).array();
+    ByteBuffer maxsbb = ByteBuffer.wrap(maximum_size);
+    long[] lamaxs = (maxsbb.asLongBuffer()).array();
+
+    return H5Sset_extent_simple(space_id, rank, lacs, lamaxs);
+}
+
+//////////////////////////////////////////////////////////////
+//                                                          //
+//H5T: Datatype Interface Functions                         //
+//                                                          //
+//////////////////////////////////////////////////////////////
+
+/**
+ *  H5Tarray_create creates a new array datatype object. 
+ *
+ *  @deprecated As of HDF5 1.8, replaced by {@link #H5Tarray_create(int, int, long[])}
+ *
+ *  @param base     IN: Datatype identifier for the array base datatype.
+ *  @param rank     IN: Rank of the array.
+ *  @param dims     IN: Size of each array dimension.
+ *  @param perms    IN: Dimension permutation. (Currently not implemented.)
+ *
+ *  @return a valid datatype identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - dims is null.
+ **/
+@Deprecated
+public static int H5Tarray_create(int base, int rank, int[] dims,
+        int[] perms) throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Tarray_create(base, rank, dims, perms);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Tarray_create(int base, int rank,
+        int[] dims, int[] perms)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tarray_create creates a new array datatype object. 
+ *
+ *  @param base_id  IN: Datatype identifier for the array base datatype.
+ *  @param ndims    IN: Rank of the array.
+ *  @param dim      IN: Size of each array dimension.
+ *
+ *  @return a valid datatype identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - dim is null.
+ **/
+public static int H5Tarray_create(int base_id, int ndims, long[] dim)
+throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Tarray_create2(base_id, ndims, dim);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Tarray_create2(int base_id, int ndims, long[] dim)
+throws HDF5LibraryException, NullPointerException;
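+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): creates a 3x4 array datatype of native int. The caller is
+// responsible for closing the returned identifier with H5Tclose.
+private static int exampleCreateIntArrayType()
+        throws HDF5LibraryException, NullPointerException
+{
+    long[] dim = { 3, 4 };
+    return H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, dim);
+}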
+
+/**
+ * H5Tclose releases a datatype.
+ * 
+ * @param type_id IN: Identifier of datatype to release.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tclose(int type_id) throws HDF5LibraryException
+{
+    if (type_id < 0)
+        return 0; // throw new HDF5LibraryException("Negative ID");
+    
+    OPEN_IDS.removeElement(type_id);
+    return _H5Tclose(type_id);
+}
+
+private synchronized static native int _H5Tclose(int type_id)
+        throws HDF5LibraryException;
+/**
+* H5Tcommit commits a transient datatype (not immutable) to a file, turning
+* it into a named datatype.
+* 
+* @deprecated As of HDF5 1.8, replaced by {@link #H5Tcommit(int, String, int, int, int, int)}
+*
+* @param loc_id   IN: Location identifier.
+* @param name     IN: Name given to committed datatype.
+* @param type_id  IN: Identifier of datatype to be committed.
+* 
+* @return a non-negative value if successful
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+* @exception NullPointerException - name is null.
+**/
+@Deprecated
+public static int H5Tcommit(int loc_id, String name,
+       int type_id) throws HDF5LibraryException, NullPointerException
+{
+   return H5Tcommit1(loc_id, name, type_id);
+}
+@Deprecated
+public synchronized static native int H5Tcommit1(int loc_id, String name,
+       int type) throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tcommit saves a transient datatype as an immutable named datatype in a file.
+ *
+ *  @param loc_id   IN: Location identifier.
+ *  @param name     IN: Name given to committed datatype.
+ *  @param type_id  IN: Identifier of datatype to be committed.
+ *  @param lcpl_id  IN: Link creation property list.
+ *  @param tcpl_id  IN: Datatype creation property list.
+ *  @param tapl_id  IN: Datatype access property list.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native void H5Tcommit(int loc_id, String name, int type_id, int lcpl_id,
+        int tcpl_id, int tapl_id)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tcommit_anon commits a transient datatype (not immutable) to a file, 
+ *  turning it into a named datatype with the specified creation and property lists.
+ *
+ *  @param loc_id   IN: Location identifier.
+ *  @param type_id  IN: Identifier of datatype to be committed.
+ *  @param tcpl_id  IN: Datatype creation property list.
+ *  @param tapl_id  IN: Datatype access property list.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tcommit_anon(int loc_id, int type_id, int tcpl_id, int tapl_id)
+        throws HDF5LibraryException;
+
+/**
+* H5Tcommitted queries a type to determine whether the type specified by
+* the type identifier is a named type or a transient type.
+* 
+* @param type_id   IN: Identifier of datatype.
+* 
+* @return true if the datatype has been committed
+* 
+* @exception HDF5LibraryException - Error from the HDF-5 Library.
+**/
+public synchronized static native boolean H5Tcommitted(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ *  H5Tcompiler_conv finds out whether the library's conversion function from 
+ *  type src_id to type dst_id is a compiler (hard) conversion.
+ *
+ *  @param src_id     IN: Identifier of source datatype.
+ *  @param dst_id     IN: Identifier of destination datatype.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tcompiler_conv(int src_id, int dst_id)
+throws HDF5LibraryException;
+
+/**
+ *  H5Tconvert converts nelmts elements from the type specified by the src_id identifier to type dst_id.
+ *
+ *  @param src_id     IN: Identifier of source datatype.
+ *  @param dst_id     IN: Identifier of destination datatype.
+ *  @param nelmts     IN: Size of array buf.
+ *  @param buf        IN: Array containing pre- and post-conversion values.
+ *  @param background IN: Optional background buffer.
+ *  @param plist_id   IN: Dataset transfer property list identifier.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native void H5Tconvert(int src_id, int dst_id, long nelmts, byte[] buf,
+        byte[] background, int plist_id)
+        throws HDF5LibraryException, NullPointerException;
+//  int H5Tconvert(int src_id, int dst_id, long nelmts, Pointer buf, Pointer background, int plist_id);
+
+/**
+ *  H5Tcopy copies an existing datatype. The returned type is
+ *  always transient and unlocked.
+ *
+ *  @param type_id IN: Identifier of datatype to copy. Can be a datatype
+ *                      identifier, a predefined datatype (defined in
+ *                      H5Tpublic.h), or a dataset identifier.
+ *
+ *  @return a datatype identifier if successful
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tcopy(int type_id) throws HDF5LibraryException
+{
+    int id = _H5Tcopy(type_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Tcopy(int type_id)
+        throws HDF5LibraryException;
+/**
+ * H5Tcreate creates a new datatype of the specified class with the specified
+ * number of bytes.
+ * 
+ * @param dclass IN: Class of datatype to create.
+ * @param size   IN: The number of bytes in the datatype to create.
+ * 
+ * @return datatype identifier if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tcreate(int dclass, int size)
+        throws HDF5LibraryException
+{
+    return H5Tcreate(dclass, (long)size);
+}
+
+/**
+ *  H5Tcreate creates a new datatype of the specified class with
+ *  the specified number of bytes.
+ *
+ *  @param tclass IN: Class of datatype to create.
+ *  @param size   IN: The number of bytes in the datatype to create.
+ *
+ *  @return datatype identifier
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tcreate(int tclass, long size)
+    throws HDF5LibraryException
+{
+    int id = _H5Tcreate(tclass, size);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Tcreate(int type, long size)
+throws HDF5LibraryException;
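+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): builds an 8-byte compound datatype with two int fields, placing
+// each field at its byte offset with H5Tinsert (declared elsewhere in this
+// class). Offsets assume 4-byte native ints.
+private static int exampleCreatePointType()
+        throws HDF5LibraryException, NullPointerException
+{
+    int tid = H5Tcreate(HDF5Constants.H5T_COMPOUND, 8L);
+    H5Tinsert(tid, "x", 0L, HDF5Constants.H5T_NATIVE_INT);
+    H5Tinsert(tid, "y", 4L, HDF5Constants.H5T_NATIVE_INT);
+    return tid;
+}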
+
+/**
+ *  H5Tdecode reconstructs the HDF5 data type object and 
+ *  returns a new object handle for it.
+ *
+ *  @param buf   IN: Buffer for the data type object to be decoded.
+ *
+ *  @return a new object handle
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native int H5Tdecode(byte[] buf)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tdetect_class determines whether the datatype specified in type_id contains
+ *  any datatypes of the datatype class specified in cls.
+ *
+ *  @param type_id  IN: Identifier of datatype to query.
+ *  @param cls      IN: Identifier of datatype cls.
+ *
+ *  @return true if the datatype specified in dtype_id contains any datatypes of the datatype class
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native boolean H5Tdetect_class(int type_id, int cls)
+throws HDF5LibraryException;
+
+/**
+ *  H5Tencode converts a data type description into binary form in a buffer.
+ *
+ *  @param obj_id   IN: Identifier of the object to be encoded.
+ *  @param buf     OUT: Buffer for the object to be encoded into. 
+ *                      If the provided buffer is null, only the
+ *                      size of the buffer needed is returned.
+ *  @param nalloc   IN: The size of the allocated buffer.
+ *
+ *  @return the size needed for the allocated buffer.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - buf is null.
+ **/
+public synchronized static native int H5Tencode(int obj_id, byte[] buf, long nalloc)
+throws HDF5LibraryException, NullPointerException;
+///**
+// *  H5Tencode converts a data type description into binary form in a buffer.
+// *
+// *  @param obj_id   IN: Identifier of the object to be encoded.
+// *
+// *  @return the buffer for the object to be encoded into.
+// *
+// *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+// **/
+//public synchronized static native byte[] H5Tencode(int obj_id)
+//throws HDF5LibraryException;
+
+/**
+ * H5Tenum_create creates a new enumeration datatype based on the specified
+ * base datatype, base_id, which must be an integer type.
+ * 
+ *  @param base_id IN: Identifier of the base datatype, which must be an integer type.
+ *
+ *  @return the datatype identifier for the new enumeration datatype
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tenum_create(int base_id) throws HDF5LibraryException
+{
+    int id = _H5Tenum_create(base_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Tenum_create(int base_id)
+        throws HDF5LibraryException;
+
+/**
+ *  H5Tenum_insert inserts a new enumeration datatype member
+ *  into an enumeration datatype.
+ *
+ *  @param type  IN: Identifier of datatype.
+ *  @param name  IN: The name of the member
+ *  @param value IN: The value of the member, data of the correct type
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native void H5Tenum_insert(int type, String name, byte[] value)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Tenum_insert inserts a new enumeration datatype member into an
+ * enumeration datatype.
+ * 
+ * @param type  IN: Identifier of datatype.
+ * @param name  IN: The name of the member
+ * @param value IN: The value of the member, data of the correct type
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - name is null.
+ **/
+public static int H5Tenum_insert(int type, String name,
+        int[] value) throws HDF5LibraryException, NullPointerException
+{
+    return H5Tenum_insert_int(type, name, value);
+}
+
+public static int H5Tenum_insert(int type, String name,
+        int value) throws HDF5LibraryException, NullPointerException
+{
+    int[] val = { value };
+    return H5Tenum_insert_int(type, name, val);
+}
+
+private synchronized static native int H5Tenum_insert_int(int type, String name,
+        int[] value) throws HDF5LibraryException, NullPointerException;
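+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): creates an enumeration type over native int and inserts three
+// named members via the int convenience overload above.
+private static int exampleCreateColorEnum()
+        throws HDF5LibraryException, NullPointerException
+{
+    int tid = H5Tenum_create(HDF5Constants.H5T_NATIVE_INT);
+    H5Tenum_insert(tid, "RED", 0);
+    H5Tenum_insert(tid, "GREEN", 1);
+    H5Tenum_insert(tid, "BLUE", 2);
+    return tid;
+}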
+
+/**
+ *  H5Tenum_nameof finds the symbol name that corresponds
+ *  to the specified value of the enumeration datatype type.
+ *
+ *  @param type   IN: Identifier of datatype.
+ *  @param value  IN: The value of the member, data of the correct type.
+ *  @param size   IN: The probable length of the name
+ *
+ *  @return the symbol name.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - value is null.
+ **/
+public synchronized static native String H5Tenum_nameof(int type, byte[] value, long size)
+throws HDF5LibraryException, NullPointerException;
+//int H5Tenum_nameof(int type, Pointer value, Buffer name/* out */, long size);
+
+/**
+ * H5Tenum_nameof finds the symbol name that corresponds to the specified
+ * value of the enumeration datatype type.
+ * 
+ * @param type  IN: Identifier of datatype.
+ * @param value IN: The value of the member, data of the correct type.
+ * @param name OUT: The name of the member
+ * @param size  IN: The max length of the name
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - name is null.
+ **/
+public static int H5Tenum_nameof(int type, int[] value,
+        String[] name, int size)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Tenum_nameof_int(type, value, name, size);
+}
+private synchronized static native int H5Tenum_nameof_int(int type, int[] value,
+        String[] name, int size)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tenum_valueof finds the value that corresponds to
+ *  the specified name of the enumeration datatype type.
+ *
+ *  @param type   IN: Identifier of datatype.
+ *  @param name   IN: The name of the member
+ *  @param value OUT: The value of the member
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tenum_valueof(int type, String name, byte[] value)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Tenum_valueof finds the value that corresponds to the specified name of
+ * the enumeration datatype type.
+ * 
+ * @param type   IN: Identifier of datatype.
+ * @param name   IN: The name of the member
+ * @param value OUT: The value of the member
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - name is null.
+ **/
+public static int H5Tenum_valueof(int type,
+        String name, int[] value)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Tenum_valueof_int(type, name, value);
+}
+private synchronized static native int H5Tenum_valueof_int(int type,
+        String name, int[] value)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tequal determines whether two datatype identifiers refer
+ *  to the same datatype.
+ *
+ *  @param type_id1 IN: Identifier of datatype to compare.
+ *  @param type_id2 IN: Identifier of datatype to compare.
+ *
+ *  @return true if the datatype identifiers refer to the
+ *  same datatype, else false.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native boolean H5Tequal(int type_id1,
+        int type_id2) throws HDF5LibraryException;
+
+/**
+ *  H5Tget_array_dims returns the sizes of the dimensions of the specified array datatype object. 
+ *
+ *  @deprecated As of HDF5 1.8
+ *
+ *  @param type_id  IN: Datatype identifier of array object.
+ *  @param dims    OUT: Sizes of array dimensions.
+ *  @param perm    OUT: Dimension permutations. (This parameter is not used.)
+ *
+ *  @return the non-negative number of dimensions of the array type
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - dims is null.
+ **/
+@Deprecated
+public synchronized static native int H5Tget_array_dims(int type_id, int[] dims,
+        int[] perm) throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tget_array_dims returns the sizes of the dimensions of the specified array datatype object. 
+ *
+ *  @deprecated As of HDF5 1.8, replaced by {@link #H5Tget_array_dims(int, long[])}
+ *
+ *  @param type_id  IN: Datatype identifier of array object.
+ *  @param dims    OUT: Sizes of array dimensions.
+ *  @param perm    OUT: Dimension permutation. (Currently not implemented.)
+ *
+ *  @return the non-negative number of dimensions of the array type
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+@Deprecated
+public static int H5Tget_array_dims(int type_id, long[] dims, int[] perm)
+throws HDF5LibraryException, NullPointerException
+{
+  return H5Tget_array_dims1(type_id, dims, perm);
+}
+/**
+ *  H5Tget_array_dims1 returns the sizes of the dimensions of the specified array datatype object. 
+ *
+ *  @deprecated As of HDF5 1.8, replaced by {@link #H5Tget_array_dims2(int, long[])}
+ *
+ *  @see public static int H5Tget_array_dims(int type_id, long[] dims, int[] perm)
+ **/
+@Deprecated
+private synchronized static native int H5Tget_array_dims1(int type_id, long[] dims, int[] perm)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tget_array_dims returns the sizes of the dimensions of the specified array datatype object. 
+ *
+ *  @param type_id  IN: Datatype identifier of array object.
+ *  @param dims    OUT: Sizes of array dimensions.
+ *
+ *  @return the non-negative number of dimensions of the array type
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - dims is null.
+ **/
+public static int H5Tget_array_dims(int type_id, long[] dims)
+throws HDF5LibraryException, NullPointerException
+{
+  return H5Tget_array_dims2(type_id, dims);
+}
+/**
+ *  H5Tget_array_dims2 returns the sizes of the dimensions of the specified array datatype object. 
+ *
+ *  @see #H5Tget_array_dims
+ **/
+public synchronized static native int H5Tget_array_dims2(int type_id, long[] dims)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tget_array_ndims returns the rank, the number of dimensions, of an array datatype object. 
+ *
+ *  @param type_id  IN: Datatype identifier of array object.
+ *
+ *  @return the rank of the array
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_array_ndims(int type_id)
+throws HDF5LibraryException;
+
+/**
+ * H5Tget_class returns the datatype class identifier.
+ * 
+ *  @param type_id  IN: Identifier of datatype to query.
+ *
+ *  @return datatype class identifier if successful; otherwise H5T_NO_CLASS(-1).
+ * 
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_class(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_class_name returns the name of the datatype class given the class identifier.
+ * 
+ *  @param class_id  IN: Identifier of class from H5Tget_class.
+ *
+ *  @return the class name if recognized; otherwise "H5T_NO_CLASS".
+ * 
+ **/
+public static String H5Tget_class_name(int class_id)
+{
+    String retValue = null;
+    if(HDF5Constants.H5T_INTEGER==class_id)  /*integer types             */
+        retValue = "H5T_INTEGER";
+    else if(HDF5Constants.H5T_FLOAT==class_id)    /*floating-point types      */
+        retValue = "H5T_FLOAT";
+    else if(HDF5Constants.H5T_TIME==class_id)     /*date and time types       */
+        retValue = "H5T_TIME";
+    else if(HDF5Constants.H5T_STRING==class_id)   /*character string types    */
+        retValue = "H5T_STRING";
+    else if(HDF5Constants.H5T_BITFIELD==class_id) /*bit field types           */
+        retValue = "H5T_BITFIELD";
+    else if(HDF5Constants.H5T_OPAQUE==class_id)   /*opaque types              */
+        retValue = "H5T_OPAQUE";
+    else if(HDF5Constants.H5T_COMPOUND==class_id) /*compound types           */
+        retValue = "H5T_COMPOUND";
+    else if(HDF5Constants.H5T_REFERENCE==class_id)/*reference types          */
+        retValue = "H5T_REFERENCE";
+    else if(HDF5Constants.H5T_ENUM==class_id)     /*enumeration types        */
+        retValue = "H5T_ENUM";
+    else if(HDF5Constants.H5T_VLEN==class_id)     /*Variable-Length types    */
+        retValue = "H5T_VLEN";
+    else if(HDF5Constants.H5T_ARRAY==class_id)    /*Array types              */
+        retValue = "H5T_ARRAY";
+    else
+        retValue = "H5T_NO_CLASS";
+
+    return retValue;
+}
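+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): maps a datatype identifier to a readable class name by combining
+// H5Tget_class and H5Tget_class_name.
+private static String exampleClassNameOf(int type_id)
+        throws HDF5LibraryException
+{
+    return H5Tget_class_name(H5Tget_class(type_id));
+}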
+
+/**
+ *  H5Tget_create_plist returns a property list identifier for the datatype 
+ *  creation property list associated with the datatype specified by type_id. 
+ *
+ *  @param type_id   IN: Identifier of datatype.
+ *
+ *  @return a datatype property list identifier.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_create_plist(int type_id)
+throws HDF5LibraryException;
+
+/**
+ * H5Tget_cset retrieves the character set type of a string datatype.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return a valid character set type if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_cset(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_cset sets the character set to be used.
+ * 
+ * @param type_id  IN: Identifier of datatype to modify.
+ * @param cset     IN: Character set type.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_cset(int type_id, int cset)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_ebias retrieves the exponent bias of a floating-point type.
+ * 
+ * @param type_id
+ *            Identifier of datatype to query.
+ * 
+ * @return the bias if successful; otherwise 0.
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_ebias(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_ebias sets the exponent bias of a floating-point type.
+ * 
+ * @param type_id
+ *            Identifier of datatype to set.
+ * @param ebias
+ *            Exponent bias value.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Tset_ebias(int type_id, int ebias)
+        throws HDF5LibraryException
+{
+    H5Tset_ebias(type_id, (long)ebias);
+    return 0;
+}
+
+/**
+ *  H5Tget_ebias_long retrieves the exponent bias of a
+ *  floating-point type.
+ *
+ *  @param type_id  IN: Identifier of datatype to query.
+ *
+ *  @return the bias 
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Tget_ebias_long(int type_id)
+throws HDF5LibraryException;
+
+/**
+ *  H5Tset_ebias sets the exponent bias of a floating-point type.
+ *
+ *  @param type_id  IN: Identifier of datatype to set.
+ *  @param ebias    IN: Exponent bias value.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tset_ebias(int type_id, long ebias)
+throws HDF5LibraryException;
+
+/**
+ *  H5Tget_fields retrieves information about the locations of
+ *  the various bit fields of a floating point datatype.
+ *
+ *  @param type_id  IN: Identifier of datatype to query.
+ *  @param fields  OUT: location of size and bit-position.
+ *  <ul>
+ *      <li>fields[0] = spos  OUT: location to return sign bit-position.</li>
+ *      <li>fields[1] = epos  OUT: location to return exponent bit-position.</li>
+ *      <li>fields[2] = esize OUT: location to return size of exponent in bits.</li>
+ *      <li>fields[3] = mpos  OUT: location to return mantissa bit-position.</li>
+ *      <li>fields[4] = msize OUT: location to return size of mantissa in bits.</li>
+ *  </ul>
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - fields is null.
+ *  @exception IllegalArgumentException - fields array is invalid.
+ **/
+public synchronized static native void H5Tget_fields(int type_id, long[] fields)
+throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
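+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): reads the five bit-field locations of a floating-point type into
+// the fixed layout documented above (spos, epos, esize, mpos, msize).
+private static long[] exampleGetFloatFields(int type_id)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException
+{
+    long[] fields = new long[5];
+    H5Tget_fields(type_id, fields);
+    return fields;
+}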
+
+/**
+ * H5Tget_fields retrieves information about the locations of the various
+ * bit fields of a floating point datatype.
+ * 
+ * @param type_id IN: Identifier of datatype to query.
+ * @param fields OUT: location of size and bit-position.
+ * 
+ * <pre>
+ *      fields[0] = spos  OUT: location to return sign bit-position.
+ *      fields[1] = epos  OUT: location to return exponent bit-position.
+ *      fields[2] = esize OUT: location to return size of exponent in bits.
+ *      fields[3] = mpos  OUT: location to return mantissa bit-position.
+ *      fields[4] = msize OUT: location to return size of mantissa in bits.
+ * </pre>
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - fields is null.
+ * @exception IllegalArgumentException - fields array is invalid.
+ **/
+public static int H5Tget_fields(int type_id, int[] fields)
+        throws HDF5LibraryException, NullPointerException, IllegalArgumentException
+{
+    return H5Tget_fields_int(type_id, fields);
+}
+private synchronized static native int H5Tget_fields_int(int type_id,
+        int[] fields)
+        throws HDF5LibraryException, NullPointerException,
+        IllegalArgumentException;
+
+/**
+ *  H5Tset_fields sets the locations and sizes of the various
+ *  floating point bit fields.
+ *
+ *  @param type_id  IN: Identifier of datatype to set.
+ *  @param spos     IN: Sign bit position.
+ *  @param epos     IN: Exponent bit position.
+ *  @param esize    IN: Size of exponent in bits.
+ *  @param mpos     IN: Mantissa bit position.
+ *  @param msize    IN: Size of mantissa in bits.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tset_fields(int type_id, long spos, long epos, long esize, long mpos, long msize)
+throws HDF5LibraryException;
+
+/**
+ * H5Tset_fields sets the locations and sizes of the various floating point
+ * bit fields.
+ * 
+ * @param type_id
+ *            Identifier of datatype to set.
+ * @param spos
+ *            Sign bit position.
+ * @param epos
+ *            Exponent bit position.
+ * @param esize
+ *            Size of exponent in bits.
+ * @param mpos
+ *            Mantissa bit position.
+ * @param msize
+ *            Size of mantissa in bits.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Tset_fields(int type_id, int spos,
+        int epos, int esize, int mpos, int msize)
+        throws HDF5LibraryException
+{
+    H5Tset_fields(type_id, (long)spos, (long)epos, (long)esize, 
+            (long)mpos, (long)msize);
+    return 0;
+}
+
+/**
+ * H5Tget_inpad retrieves the internal padding type for unused bits in
+ * floating-point datatypes.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return a valid padding type if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_inpad(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * If any internal bits of a floating point type are unused (that is, those
+ * significant bits which are not part of the sign, exponent, or mantissa),
+ * then H5Tset_inpad specifies how those bits are filled, according to the
+ * padding type inpad.
+ * 
+ * @param type_id  IN: Identifier of datatype to modify.
+ * @param inpad    IN: Padding type.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_inpad(int type_id, int inpad)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_member_class returns the datatype class of the specified member.
+ * 
+ * @param type_id   IN: Datatype identifier of compound object.
+ * @param membno    IN: Compound object member number.
+ * 
+ * @return the datatype class of the specified member if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_member_class(int type_id,
+        int membno) throws HDF5LibraryException;
+
+/**
+ * H5Tget_member_index retrieves the index of a field of a compound
+ * datatype.
+ * 
+ * @param type_id    IN: Identifier of datatype to query.
+ * @param field_name IN: Field name of the field index to retrieve.
+ * 
+ * @return if field is defined, the index; else negative.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_member_index(int type_id, String field_name);
+
+/**
+ * H5Tget_member_name retrieves the name of a field of a compound datatype or 
+ * an element of an enumeration datatype. 
+ * 
+ * @param type_id    IN: Identifier of datatype to query.
+ * @param field_idx  IN: Field index (0-based) of the field name to retrieve.
+ * 
+ * @return the name of the field as a String if successful; otherwise null.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native String H5Tget_member_name(int type_id, int field_idx);
+
+/**
+ * H5Tget_member_offset returns the byte offset of the specified member of
+ * the compound datatype. This is the byte offset in the HDF-5 file/library,
+ * NOT the offset of any Java object which might be mapped to this data
+ * item.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * @param membno   IN: Field index (0-based) of the field type to retrieve.
+ * 
+ * @return the offset of the member.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Tget_member_offset(int type_id,
+        int membno) throws HDF5LibraryException;
+
+/**
+ * H5Tget_member_type returns the datatype of the specified member.
+ * 
+ * @param type_id   IN: Identifier of datatype to query.
+ * @param field_idx IN: Field index (0-based) of the field type to retrieve.
+ * 
+ * @return the identifier of a copy of the datatype of the field if
+ *         successful;
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tget_member_type(int type_id, int field_idx)
+        throws HDF5LibraryException
+{
+    int id = _H5Tget_member_type(type_id, field_idx);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Tget_member_type(int type_id,
+        int field_idx) throws HDF5LibraryException;
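+
+// Illustrative usage sketch (hypothetical helper, not part of the upstream
+// API): walks all fields of a compound datatype, printing each name and
+// byte offset, and closes every member datatype handle after use.
+// H5Tget_nmembers is declared further down in this class.
+private static void exampleDumpCompound(int type_id)
+        throws HDF5LibraryException
+{
+    int n = H5Tget_nmembers(type_id);
+    for (int i = 0; i < n; i++) {
+        String name = H5Tget_member_name(type_id, i);
+        long offset = H5Tget_member_offset(type_id, i);
+        int mtid = H5Tget_member_type(type_id, i);
+        System.out.println(name + " at offset " + offset);
+        H5Tclose(mtid);
+    }
+}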
+
+/**
+ *  H5Tget_member_value returns the value of the enumeration datatype member memb_no. 
+ *
+ *  @param type_id  IN: Datatype identifier for the enumeration datatype.
+ *  @param membno   IN: Number of the enumeration datatype member.
+ *  @param value   OUT: The value of the member
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - value is null.
+ **/
+public synchronized static native void H5Tget_member_value(int type_id, int membno, byte[] value)
+throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Tget_member_value returns the value of the enumeration datatype member
+ * memb_no.
+ * 
+ * @param type_id IN: Identifier of datatype.
+ * @param membno  IN: The number of the member
+ * @param value  OUT: The value of the member
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - value is null.
+ **/
+public static int H5Tget_member_value(int type_id,
+        int membno, int[] value)
+        throws HDF5LibraryException, NullPointerException
+{
+    return H5Tget_member_value_int(type_id, membno, value);
+}
+private synchronized static native int H5Tget_member_value_int(int type_id,
+        int membno, int[] value)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ *  H5Tget_native_type returns the equivalent native datatype for the datatype specified in type_id. 
+ *
+ *  @param type_id   IN: Identifier of datatype to query.
+ *                       Direction of search is assumed to be in ascending order.
+ *
+ *  @return the native datatype identifier for the specified dataset datatype. 
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static int H5Tget_native_type(int type_id)
+        throws HDF5LibraryException 
+{
+    return H5Tget_native_type(type_id, HDF5Constants.H5T_DIR_ASCEND);
+}
+
+/**
+ *  H5Tget_native_type returns the equivalent native datatype for the datatype specified in type_id. 
+ *
+ *  @param type_id   IN: Identifier of datatype to query.
+ *  @param direction IN: Direction of search.
+ *
+ *  @return the native datatype identifier for the specified dataset datatype. 
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tget_native_type(int type_id, int direction)
+        throws HDF5LibraryException
+{
+    int id = _H5Tget_native_type(type_id, direction);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+private synchronized static native int _H5Tget_native_type(int tid,
+        int direction) throws HDF5LibraryException;
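+
+/* Illustrative sketch, not part of the upstream source: converting a
+ * dataset's file datatype to the equivalent native (in-memory) type
+ * before reading. Assumes dataset_id is a valid dataset identifier and
+ * that H5Dget_type and H5Tclose are declared elsewhere in this class.
+ *
+ * <pre>
+ *    int ftid = H5.H5Dget_type(dataset_id);  // datatype as stored in the file
+ *    int ntid = H5.H5Tget_native_type(ftid); // matching native type (ascending search)
+ *    // ... read the dataset using ntid as the memory datatype ...
+ *    H5.H5Tclose(ntid);
+ *    H5.H5Tclose(ftid);
+ * </pre>
+ */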
+
+/**
+ * H5Tget_nmembers retrieves the number of fields a compound datatype has.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return the number of members the datatype has if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_nmembers(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_norm retrieves the mantissa normalization of a floating-point
+ * datatype.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return a valid normalization type if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_norm(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_norm sets the mantissa normalization of a floating-point datatype.
+ * 
+ * @param type_id  IN: Identifier of datatype to set.
+ * @param norm     IN: Mantissa normalization type.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_norm(int type_id, int norm)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_offset retrieves the bit offset of the first significant bit.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ *
+ * @return a positive offset value if successful; otherwise 0.
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_offset(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_offset sets the bit offset of the first significant bit.
+ * 
+ * @param type_id
+ *            Identifier of datatype to set.
+ * @param offset
+ *            Offset of first significant bit.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Tset_offset(int type_id, int offset)
+        throws HDF5LibraryException
+{
+    H5Tset_offset(type_id, (long)offset);
+    return 0;
+}
+
+/**
+ *  H5Tset_offset sets the bit offset of the first significant bit.
+ *
+ *  @param type_id  IN: Identifier of datatype to set.
+ *  @param offset   IN: Offset of first significant bit.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tset_offset(int type_id, long offset)
+throws HDF5LibraryException;
+
+/**
+ * H5Tget_order returns the byte order of an atomic datatype.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return a byte order constant if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_order(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_order sets the byte ordering of an atomic datatype.
+ * 
+ * @param type_id  IN: Identifier of datatype to set.
+ * @param order    IN: Byte ordering constant.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_order(int type_id, int order)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_pad retrieves the padding types of the least-significant and
+ * most-significant bits.
+ * 
+ * @param type_id IN: Identifier of datatype to query.
+ * @param pad    OUT: locations in which to return the least-significant
+ *                    and most-significant bit padding types.
+ * 
+ *            <pre>
+ *      pad[0] = lsb // least-significant bit padding type
+ *      pad[1] = msb // most-significant bit padding type
+ * </pre>
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - pad is null.
+ **/
+public synchronized static native int H5Tget_pad(int type_id, int[] pad)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+ * H5Tset_pad sets the padding types of the least-significant and
+ * most-significant bits.
+ * 
+ * @param type_id  IN: Identifier of datatype to set.
+ * @param lsb      IN: Padding type for least-significant bits.
+ * @param msb      IN: Padding type for most-significant bits.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_pad(int type_id, int lsb,
+        int msb) throws HDF5LibraryException;
+
+/**
+ * H5Tget_precision returns the precision of an atomic datatype.
+ * 
+ * @param type_id
+ *            Identifier of datatype to query.
+ * 
+ * @return the number of significant bits if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_precision(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_precision sets the precision of an atomic datatype.
+ * 
+ * @param type_id
+ *            Identifier of datatype to set.
+ * @param precision
+ *            Number of bits of precision for datatype.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Tset_precision(int type_id,
+        int precision) throws HDF5LibraryException
+{
+    H5Tset_precision(type_id, (long)precision);
+    return 0;
+}
+
+/**
+ *  H5Tget_precision returns the precision of an atomic datatype.
+ *
+ *  @param type_id  IN: Identifier of datatype to query.
+ *
+ *  @return the number of significant bits if successful
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Tget_precision_long(int type_id)
+throws HDF5LibraryException;
+
+/**
+ *  H5Tset_precision sets the precision of an atomic datatype.
+ *
+ *  @param type_id    IN: Identifier of datatype to set.
+ *  @param precision  IN: Number of bits of precision for datatype.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tset_precision(int type_id, long precision)
+throws HDF5LibraryException;
+
+/**
+ * H5Tget_sign retrieves the sign type for an integer type.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return a valid sign type if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_sign(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_sign sets the sign property for an integer type.
+ * 
+ * @param type_id  IN: Identifier of datatype to set.
+ * @param sign     IN: Sign type.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_sign(int type_id, int sign)
+        throws HDF5LibraryException;
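+
+/* Illustrative sketch, not part of the upstream source: shaping a custom
+ * atomic integer type with the setters above, following the usual pattern
+ * of copying a predefined type first (H5Tcopy is declared elsewhere in
+ * this class).
+ *
+ * <pre>
+ *    int tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT);
+ *    H5.H5Tset_precision(tid, 12L);                    // 12 significant bits
+ *    H5.H5Tset_offset(tid, 4L);                        // starting at bit 4
+ *    H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_LE); // little-endian
+ *    H5.H5Tset_sign(tid, HDF5Constants.H5T_SGN_NONE);  // unsigned
+ *    // ... use tid, then H5.H5Tclose(tid);
+ * </pre>
+ */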
+
+/**
+ * H5Tget_size returns the size of a datatype in bytes.
+ * 
+ * @param type_id
+ *            Identifier of datatype to query.
+ * 
+ * @return the size of the datatype in bytes if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_size(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_size sets the total size in bytes, size, for an atomic datatype
+ * (this operation is not permitted on compound datatypes).
+ * 
+ * @param type_id
+ *            Identifier of datatype to change size.
+ * @param size
+ *            Size in bytes to modify datatype.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException
+ *                - Error from the HDF-5 Library.
+ **/
+public static int H5Tset_size(int type_id, int size)
+        throws HDF5LibraryException
+{
+    H5Tset_size(type_id, (long)size);
+    return 0;
+}
+
+/**
+ *  H5Tget_size returns the size of a datatype in bytes.
+ *
+ *  @param type_id  IN: Identifier of datatype to query.
+ *
+ *  @return the size of the datatype in bytes
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native long H5Tget_size_long(int type_id)
+throws HDF5LibraryException;
+
+/**
+ *  H5Tset_size sets the total size in bytes, size, for an
+ *  atomic datatype (this operation is not permitted on
+ *  compound datatypes).
+ *
+ *  @param type_id  IN: Identifier of datatype to change size.
+ *  @param size     IN: Size in bytes to modify datatype.
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native void H5Tset_size(int type_id, long size)
+throws HDF5LibraryException;
+
+/**
+ * H5Tget_strpad retrieves the string padding method for a string datatype.
+ * 
+ * @param type_id  IN: Identifier of datatype to query.
+ * 
+ * @return a valid string padding type if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tget_strpad(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_strpad defines the storage mechanism for the string.
+ * 
+ * @param type_id IN: Identifier of datatype to modify.
+ * @param strpad  IN: String padding type.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_strpad(int type_id, int strpad)
+        throws HDF5LibraryException;
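+
+/* Illustrative sketch, not part of the upstream source: building a
+ * fixed-length, null-terminated string type with the calls above.
+ *
+ * <pre>
+ *    int tid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);  // one-byte C string base type
+ *    H5.H5Tset_size(tid, 32L);                      // 32 bytes, including the terminator
+ *    H5.H5Tset_strpad(tid, HDF5Constants.H5T_STR_NULLTERM);
+ *    // ... use tid, then H5.H5Tclose(tid);
+ * </pre>
+ */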
+
+/**
+ * H5Tget_super returns the type from which TYPE is derived.
+ * 
+ * @param type IN: Identifier of datatype.
+ * 
+ * @return the parent type
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tget_super(int type) throws HDF5LibraryException
+{
+    int id = _H5Tget_super(type);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Tget_super(int type)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tget_tag returns the tag associated with datatype type_id.
+ * 
+ * @param type IN: Identifier of datatype.
+ * 
+ * @return the tag
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native String H5Tget_tag(int type)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tset_tag tags an opaque datatype type_id with a unique ASCII identifier
+ * tag.
+ * 
+ * @param type IN: Datatype identifier for the opaque datatype to be tagged.
+ * @param tag  IN: Descriptive ASCII string with which the opaque datatype is to be tagged.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tset_tag(int type, String tag)
+        throws HDF5LibraryException;
+
+/**
+ *  H5Tinsert adds another member to the compound datatype type_id.
+ *
+ *  @param type_id  IN: Identifier of compound datatype to modify.
+ *  @param name     IN: Name of the field to insert.
+ *  @param offset   IN: Offset in memory structure of the field to insert.
+ *  @param field_id IN: Datatype identifier of the field to insert.
+ * 
+ *  @return a non-negative value if successful
+ * 
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+public synchronized static native int H5Tinsert(int type_id, String name,
+        long offset, int field_id)
+        throws HDF5LibraryException, NullPointerException;
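+
+/* Illustrative sketch, not part of the upstream source: assembling a
+ * compound datatype field by field. Assumes the H5Tcreate(int, int)
+ * overload declared elsewhere in this class, and a platform with 4-byte
+ * int and 8-byte double.
+ *
+ * <pre>
+ *    // struct { int id; double value; } laid out without padding
+ *    int tid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, 12);
+ *    H5.H5Tinsert(tid, "id", 0L, HDF5Constants.H5T_NATIVE_INT);
+ *    H5.H5Tinsert(tid, "value", 4L, HDF5Constants.H5T_NATIVE_DOUBLE);
+ * </pre>
+ */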
+
+/**
+ *  H5Tis_variable_str determines whether the datatype identified in type_id is a variable-length string. 
+ *
+ *  @param type_id  IN: Identifier of datatype to query.
+ *
+ *  @return true if type_id is a variable-length string. 
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native boolean H5Tis_variable_str(int type_id)
+throws HDF5LibraryException;
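+
+/* Illustrative sketch, not part of the upstream source: creating a
+ * variable-length string type and verifying it with the query above.
+ *
+ * <pre>
+ *    int tid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ *    H5.H5Tset_size(tid, HDF5Constants.H5T_VARIABLE); // marks the string as variable-length
+ *    boolean vl = H5.H5Tis_variable_str(tid);         // true
+ * </pre>
+ */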
+
+/**
+ * H5Tlock locks the datatype specified by the type_id identifier, making it
+ * read-only and non-destructible.
+ * 
+ * @param type_id IN: Identifier of datatype to lock.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tlock(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ *  H5Topen opens a named datatype at the location specified
+ *  by loc_id and returns an identifier for the datatype.
+ *
+ *  @deprecated As of HDF5 1.8, replaced by {@link #H5Topen(int, String, int)}
+ *
+ *  @param loc_id   IN: A file, group, or datatype identifier.
+ *  @param name     IN: A datatype name, defined within the file or group identified by loc_id.
+ *
+ *  @return a named datatype identifier if successful
+ *
+ *  @exception HDF5LibraryException - Error from the HDF-5 Library.
+ *  @exception NullPointerException - name is null.
+ **/
+@Deprecated
+public static int H5Topen(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Topen(loc_id, name);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Topen(int loc_id, String name)
+        throws HDF5LibraryException, NullPointerException;
+
+/**
+*  H5Topen opens a named datatype at the location specified
+*  by loc_id and returns an identifier for the datatype.
+*
+*  @param loc_id   IN: A file, group, or datatype identifier.
+*  @param name     IN: A datatype name, defined within the file or group identified by loc_id.
+*  @param tapl_id  IN: Datatype access property list.
+*
+*  @return a named datatype identifier if successful
+*
+*  @exception HDF5LibraryException - Error from the HDF-5 Library.
+*  @exception NullPointerException - name is null.
+**/
+public static int H5Topen(int loc_id, String name, int tapl_id)
+throws HDF5LibraryException, NullPointerException
+{
+    int id = _H5Topen2(loc_id, name, tapl_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Topen2(int loc_id, String name, int tapl_id)
+throws HDF5LibraryException, NullPointerException;
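+
+/* Illustrative sketch, not part of the upstream source: opening a named
+ * (committed) datatype with a default access property list. Assumes
+ * file_id is an open file identifier and "/my_type" (a hypothetical name)
+ * exists in that file.
+ *
+ * <pre>
+ *    int tid = H5.H5Topen(file_id, "/my_type", HDF5Constants.H5P_DEFAULT);
+ *    // ... inspect tid, then H5.H5Tclose(tid);
+ * </pre>
+ */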
+
+/**
+ * H5Tpack recursively removes padding from within a compound datatype to
+ * make storing that data more space-efficient.
+ * <P>
+ * <b>WARNING:</b> This call only affects the C data; even if it succeeds,
+ * there may be no visible effect on Java objects.
+ * 
+ * @param type_id IN: Identifier of datatype to modify.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public synchronized static native int H5Tpack(int type_id)
+        throws HDF5LibraryException;
+
+/**
+ * H5Tvlen_create creates a new variable-length (VL) datatype.
+ * 
+ * @param base_id  IN: Identifier of parent datatype.
+ * 
+ * @return a non-negative value if successful
+ * 
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+public static int H5Tvlen_create(int base_id) throws HDF5LibraryException
+{
+    int id = _H5Tvlen_create(base_id);
+    if (id > 0)
+        OPEN_IDS.addElement(id);
+    return id;
+}
+
+private synchronized static native int _H5Tvlen_create(int base_id)
+        throws HDF5LibraryException;
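+
+/* Illustrative sketch, not part of the upstream source: a variable-length
+ * sequence of native ints, e.g. for ragged arrays.
+ *
+ * <pre>
+ *    int vtid = H5.H5Tvlen_create(HDF5Constants.H5T_NATIVE_INT);
+ *    // ... use vtid as a dataset or attribute type, then H5.H5Tclose(vtid);
+ * </pre>
+ */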
+
+///////// unimplemented ////////
+
+//H5T_conv_t H5Tfind(int src_id, int dst_id, H5T_cdata_t *pcdata);
+
+//public synchronized static native int H5Tregister(H5T_pers_t pers, String name, int src_id, int dst_id,
+//            H5T_conv_t func)
+//    throws HDF5LibraryException, NullPointerException;
+
+//public synchronized static native int H5Tunregister(H5T_pers_t pers, String name, int src_id, int dst_id,
+//            H5T_conv_t func)
+//    throws HDF5LibraryException, NullPointerException;
+
+
+//////////////////////////////////////////////////////////////
+////
+//H5Z: Filter Interface Functions //
+////
+//////////////////////////////////////////////////////////////
+
+public synchronized static native int H5Zfilter_avail(int filter)
+        throws HDF5LibraryException, NullPointerException;
+
+public synchronized static native int H5Zget_filter_info(int filter)
+        throws HDF5LibraryException;
+
+public synchronized static native int H5Zunregister(int filter)
+        throws HDF5LibraryException, NullPointerException;
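+
+/* Illustrative sketch, not part of the upstream source: probing filter
+ * availability before creating a compressed dataset.
+ *
+ * <pre>
+ *    if (H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE) > 0) {
+ *        int info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ *        boolean canEncode =
+ *            (info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;
+ *    }
+ * </pre>
+ */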
+
+}
+
+///////// unimplemented ////////
+
+//herr_t H5Zregister(const void *cls);
+
diff --git a/source/java/ncsa/hdf/hdf5lib/HDF5Constants.java b/source/java/ncsa/hdf/hdf5lib/HDF5Constants.java
new file mode 100755
index 0000000..4b850bd
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/HDF5Constants.java
@@ -0,0 +1,1790 @@
+/****************************************************************************
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help@hdfgroup.org.           *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib;
+
+/**
+ * This class contains C constants and enumerated types of the HDF5 library.
+ * The values of these constants are obtained from the library by calling
+ * J2C(int jconstant), where jconstant is any of the private constants whose
+ * names start with "JH5" and need to be converted.
+ * <P>
+ * <b>Do not edit this file!</b>
+ * 
+ * <b>See also:</b> ncsa.hdf.hdf5lib.HDF5Library
+ */
+public class HDF5Constants {
+    static {
+        H5.loadH5Lib();
+    }
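+
+    /* Illustrative sketch, not part of the upstream source: the constants
+     * are resolved from the native library when this class is loaded, so
+     * they can then be used like ordinary Java constants. H5Fopen is
+     * declared in ncsa.hdf.hdf5lib.H5; "example.h5" is a hypothetical file.
+     *
+     * <pre>
+     *    int fid = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY,
+     *                         HDF5Constants.H5P_DEFAULT);
+     * </pre>
+     */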
+
+    // /////////////////////////////////////////////////////////////////////////
+    // Get the HDF5 constants from the library //
+    // /////////////////////////////////////////////////////////////////////////
+
+    public static final long H5_QUARTER_HADDR_MAX = H5_QUARTER_HADDR_MAX();
+
+    public static final int H5_SZIP_MAX_PIXELS_PER_BLOCK = H5_SZIP_MAX_PIXELS_PER_BLOCK();
+    public static final int H5_SZIP_NN_OPTION_MASK = H5_SZIP_NN_OPTION_MASK();
+    public static final int H5_SZIP_EC_OPTION_MASK = H5_SZIP_EC_OPTION_MASK();
+    public static final int H5_SZIP_ALLOW_K13_OPTION_MASK = H5_SZIP_ALLOW_K13_OPTION_MASK();
+    public static final int H5_SZIP_CHIP_OPTION_MASK = H5_SZIP_CHIP_OPTION_MASK();
+    public static final int H5_INDEX_UNKNOWN = H5_INDEX_UNKNOWN();
+    public static final int H5_INDEX_NAME = H5_INDEX_NAME();
+    public static final int H5_INDEX_CRT_ORDER = H5_INDEX_CRT_ORDER();
+    public static final int H5_INDEX_N = H5_INDEX_N();
+    public static final int H5_ITER_UNKNOWN = H5_ITER_UNKNOWN();
+    public static final int H5_ITER_INC = H5_ITER_INC();
+    public static final int H5_ITER_DEC = H5_ITER_DEC();
+    public static final int H5_ITER_NATIVE = H5_ITER_NATIVE();
+    public static final int H5_ITER_N = H5_ITER_N();
+    public static final int H5AC_CURR_CACHE_CONFIG_VERSION = H5AC_CURR_CACHE_CONFIG_VERSION();
+    public static final int H5AC_MAX_TRACE_FILE_NAME_LEN = H5AC_MAX_TRACE_FILE_NAME_LEN();
+    public static final int H5AC_METADATA_WRITE_STRATEGY_PROCESS_ZERO_ONLY = H5AC_METADATA_WRITE_STRATEGY_PROCESS_ZERO_ONLY();
+    public static final int H5AC_METADATA_WRITE_STRATEGY_DISTRIBUTED = H5AC_METADATA_WRITE_STRATEGY_DISTRIBUTED();
+    public static final int H5C_incr_off = H5C_incr_off();
+    public static final int H5C_incr_threshold = H5C_incr_threshold();
+    public static final int H5C_flash_incr_off = H5C_flash_incr_off();
+    public static final int H5C_flash_incr_add_space = H5C_flash_incr_add_space();
+    public static final int H5C_decr_off = H5C_decr_off();
+    public static final int H5C_decr_threshold = H5C_decr_threshold();
+    public static final int H5C_decr_age_out = H5C_decr_age_out();
+    public static final int H5C_decr_age_out_with_threshold = H5C_decr_age_out_with_threshold();
+    public static final int H5D_CHUNK_BTREE = H5D_CHUNK_BTREE();
+    public static final int H5D_ALLOC_TIME_DEFAULT = H5D_ALLOC_TIME_DEFAULT();
+    public static final int H5D_ALLOC_TIME_EARLY = H5D_ALLOC_TIME_EARLY();
+    public static final int H5D_ALLOC_TIME_ERROR = H5D_ALLOC_TIME_ERROR();
+    public static final int H5D_ALLOC_TIME_INCR = H5D_ALLOC_TIME_INCR();
+    public static final int H5D_ALLOC_TIME_LATE = H5D_ALLOC_TIME_LATE();
+    public static final int H5D_FILL_TIME_ERROR = H5D_FILL_TIME_ERROR();
+    public static final int H5D_FILL_TIME_ALLOC = H5D_FILL_TIME_ALLOC();
+    public static final int H5D_FILL_TIME_NEVER = H5D_FILL_TIME_NEVER();
+    public static final int H5D_FILL_TIME_IFSET = H5D_FILL_TIME_IFSET();
+    public static final int H5D_FILL_VALUE_DEFAULT = H5D_FILL_VALUE_DEFAULT();
+    public static final int H5D_FILL_VALUE_ERROR = H5D_FILL_VALUE_ERROR();
+    public static final int H5D_FILL_VALUE_UNDEFINED = H5D_FILL_VALUE_UNDEFINED();
+    public static final int H5D_FILL_VALUE_USER_DEFINED = H5D_FILL_VALUE_USER_DEFINED();
+    public static final int H5D_LAYOUT_ERROR = H5D_LAYOUT_ERROR();
+    public static final int H5D_CHUNKED = H5D_CHUNKED();
+    public static final int H5D_COMPACT = H5D_COMPACT();
+    public static final int H5D_CONTIGUOUS = H5D_CONTIGUOUS();
+    public static final int H5D_NLAYOUTS = H5D_NLAYOUTS();
+    public static final int H5D_SPACE_STATUS_ALLOCATED = H5D_SPACE_STATUS_ALLOCATED();
+    public static final int H5D_SPACE_STATUS_ERROR = H5D_SPACE_STATUS_ERROR();
+    public static final int H5D_SPACE_STATUS_NOT_ALLOCATED = H5D_SPACE_STATUS_NOT_ALLOCATED();
+    public static final int H5D_SPACE_STATUS_PART_ALLOCATED = H5D_SPACE_STATUS_PART_ALLOCATED();
+    public static final int H5E_ALIGNMENT = H5E_ALIGNMENT();
+    public static final int H5E_ALREADYEXISTS = H5E_ALREADYEXISTS();
+    public static final int H5E_ALREADYINIT = H5E_ALREADYINIT();
+    public static final int H5E_ARGS = H5E_ARGS();
+    public static final int H5E_ATOM = H5E_ATOM();
+    public static final int H5E_ATTR = H5E_ATTR();
+    public static final int H5E_BADATOM = H5E_BADATOM();
+    public static final int H5E_BADFILE = H5E_BADFILE();
+    public static final int H5E_BADGROUP = H5E_BADGROUP();
+    public static final int H5E_BADMESG = H5E_BADMESG();
+    public static final int H5E_BADRANGE = H5E_BADRANGE();
+    public static final int H5E_BADSELECT = H5E_BADSELECT();
+    public static final int H5E_BADSIZE = H5E_BADSIZE();
+    public static final int H5E_BADTYPE = H5E_BADTYPE();
+    public static final int H5E_BADVALUE = H5E_BADVALUE();
+    public static final int H5E_BTREE = H5E_BTREE();
+    public static final int H5E_CACHE = H5E_CACHE();
+    public static final int H5E_CALLBACK = H5E_CALLBACK();
+    public static final int H5E_CANAPPLY = H5E_CANAPPLY();
+    // public static final int H5E_CANTALLOC = H5E_CANTALLOC();
+    public static final int H5E_CANTCLIP = H5E_CANTCLIP();
+    public static final int H5E_CANTCLOSEFILE = H5E_CANTCLOSEFILE();
+    public static final int H5E_CANTCONVERT = H5E_CANTCONVERT();
+    public static final int H5E_CANTCOPY = H5E_CANTCOPY();
+    public static final int H5E_CANTCOUNT = H5E_CANTCOUNT();
+    public static final int H5E_CANTCREATE = H5E_CANTCREATE();
+    public static final int H5E_CANTDEC = H5E_CANTDEC();
+    public static final int H5E_CANTDECODE = H5E_CANTDECODE();
+    public static final int H5E_CANTDELETE = H5E_CANTDELETE();
+    public static final int H5E_CANTENCODE = H5E_CANTENCODE();
+    public static final int H5E_CANTFLUSH = H5E_CANTFLUSH();
+    public static final int H5E_CANTFREE = H5E_CANTFREE();
+    public static final int H5E_CANTGET = H5E_CANTGET();
+    public static final int H5E_CANTINC = H5E_CANTINC();
+    public static final int H5E_CANTINIT = H5E_CANTINIT();
+    public static final int H5E_CANTINSERT = H5E_CANTINSERT();
+    public static final int H5E_CANTLIST = H5E_CANTLIST();
+    public static final int H5E_CANTLOAD = H5E_CANTLOAD();
+    public static final int H5E_CANTLOCK = H5E_CANTLOCK();
+    public static final int H5E_CANTNEXT = H5E_CANTNEXT();
+    public static final int H5E_CANTOPENFILE = H5E_CANTOPENFILE();
+    public static final int H5E_CANTOPENOBJ = H5E_CANTOPENOBJ();
+    // public static final int H5E_CANTRECV = H5E_CANTRECV();
+    public static final int H5E_CANTREGISTER = H5E_CANTREGISTER();
+    public static final int H5E_CANTRELEASE = H5E_CANTRELEASE();
+    public static final int H5E_CANTSELECT = H5E_CANTSELECT();
+    public static final int H5E_CANTSET = H5E_CANTSET();
+    public static final int H5E_CANTSPLIT = H5E_CANTSPLIT();
+    public static final int H5E_CANTUNLOCK = H5E_CANTUNLOCK();
+    public static final int H5E_CLOSEERROR = H5E_CLOSEERROR();
+    public static final int H5E_COMPLEN = H5E_COMPLEN();
+    public static final int H5E_DATASET = H5E_DATASET();
+    public static final int H5E_DATASPACE = H5E_DATASPACE();
+    public static final int H5E_DATATYPE = H5E_DATATYPE();
+    public static final int H5E_DEFAULT = H5E_DEFAULT();
+    public static final int H5E_DUPCLASS = H5E_DUPCLASS();
+    public static final int H5E_EFL = H5E_EFL();
+    public static final int H5E_EXISTS = H5E_EXISTS();
+    public static final int H5E_FCNTL = H5E_FCNTL();
+    public static final int H5E_FILE = H5E_FILE();
+    public static final int H5E_FILEEXISTS = H5E_FILEEXISTS();
+    public static final int H5E_FILEOPEN = H5E_FILEOPEN();
+    public static final int H5E_FUNC = H5E_FUNC();
+    public static final int H5E_HEAP = H5E_HEAP();
+    public static final int H5E_INTERNAL = H5E_INTERNAL();
+    public static final int H5E_IO = H5E_IO();
+    public static final int H5E_LINK = H5E_LINK();
+    public static final int H5E_LINKCOUNT = H5E_LINKCOUNT();
+    public static final int H5E_MAJOR = H5E_MAJOR();
+    public static final int H5E_MINOR = H5E_MINOR();
+    public static final int H5E_MOUNT = H5E_MOUNT();
+    public static final int H5E_MPI = H5E_MPI();
+    public static final int H5E_MPIERRSTR = H5E_MPIERRSTR();
+    public static final int H5E_NOFILTER = H5E_NOFILTER();
+    public static final int H5E_NOIDS = H5E_NOIDS();
+    public static final int H5E_NONE_MAJOR = H5E_NONE_MAJOR();
+    public static final int H5E_NONE_MINOR = H5E_NONE_MINOR();
+    public static final int H5E_NOSPACE = H5E_NOSPACE();
+    public static final int H5E_NOTCACHED = H5E_NOTCACHED();
+    public static final int H5E_NOTFOUND = H5E_NOTFOUND();
+    public static final int H5E_NOTHDF5 = H5E_NOTHDF5();
+    public static final int H5E_OHDR = H5E_OHDR();
+    public static final int H5E_OVERFLOW = H5E_OVERFLOW();
+    public static final int H5E_PLINE = H5E_PLINE();
+    public static final int H5E_PLIST = H5E_PLIST();
+    public static final int H5E_PROTECT = H5E_PROTECT();
+    public static final int H5E_READERROR = H5E_READERROR();
+    public static final int H5E_REFERENCE = H5E_REFERENCE();
+    public static final int H5E_RESOURCE = H5E_RESOURCE();
+    public static final int H5E_RS = H5E_RS();
+    public static final int H5E_SEEKERROR = H5E_SEEKERROR();
+    public static final int H5E_SETLOCAL = H5E_SETLOCAL();
+    public static final int H5E_STORAGE = H5E_STORAGE();
+    public static final int H5E_SYM = H5E_SYM();
+    public static final int H5E_TRUNCATED = H5E_TRUNCATED();
+    public static final int H5E_TST = H5E_TST();
+    public static final int H5E_UNINITIALIZED = H5E_UNINITIALIZED();
+    public static final int H5E_UNSUPPORTED = H5E_UNSUPPORTED();
+    public static final int H5E_VERSION = H5E_VERSION();
+    public static final int H5E_VFL = H5E_VFL();
+    public static final int H5E_WALK_DOWNWARD = H5E_WALK_DOWNWARD();
+    public static final int H5E_WALK_UPWARD = H5E_WALK_UPWARD();
+    public static final int H5E_WRITEERROR = H5E_WRITEERROR();
+    
+    public static final int H5F_ACC_CREAT = H5F_ACC_CREAT();
+    public static final int H5F_ACC_DEBUG = H5F_ACC_DEBUG();
+    public static final int H5F_ACC_EXCL = H5F_ACC_EXCL();
+    public static final int H5F_ACC_RDONLY = H5F_ACC_RDONLY();
+    public static final int H5F_ACC_RDWR = H5F_ACC_RDWR();
+    public static final int H5F_ACC_TRUNC = H5F_ACC_TRUNC();
+    public static final int H5F_ACC_DEFAULT = H5F_ACC_DEFAULT();
+    public static final int H5F_CLOSE_DEFAULT = H5F_CLOSE_DEFAULT();
+    public static final int H5F_CLOSE_SEMI = H5F_CLOSE_SEMI();
+    public static final int H5F_CLOSE_STRONG = H5F_CLOSE_STRONG();
+    public static final int H5F_CLOSE_WEAK = H5F_CLOSE_WEAK();
+    public static final int H5F_LIBVER_EARLIEST = H5F_LIBVER_EARLIEST();
+    public static final int H5F_LIBVER_LATEST = H5F_LIBVER_LATEST();
+    public static final int H5F_OBJ_ALL = H5F_OBJ_ALL();
+    public static final int H5F_OBJ_ATTR = H5F_OBJ_ATTR();
+    public static final int H5F_OBJ_DATASET = H5F_OBJ_DATASET();
+    public static final int H5F_OBJ_DATATYPE = H5F_OBJ_DATATYPE();
+    public static final int H5F_OBJ_FILE = H5F_OBJ_FILE();
+    public static final int H5F_OBJ_GROUP = H5F_OBJ_GROUP();
+    public static final int H5F_OBJ_LOCAL = H5F_OBJ_LOCAL();
+    public static final int H5F_SCOPE_GLOBAL = H5F_SCOPE_GLOBAL();
+    public static final int H5F_SCOPE_LOCAL = H5F_SCOPE_LOCAL();
+    public static final int H5F_UNLIMITED = H5F_UNLIMITED();
+    
+    public static final int H5FD_CORE = H5FD_CORE();
+    public static final int H5FD_DIRECT = H5FD_DIRECT();
+    public static final int H5FD_FAMILY = H5FD_FAMILY();
+    public static final int H5FD_LOG = H5FD_LOG();
+    public static final int H5FD_MPIO = H5FD_MPIO();
+    public static final int H5FD_MULTI = H5FD_MULTI();
+    public static final int H5FD_SEC2 = H5FD_SEC2();
+    public static final int H5FD_STDIO = H5FD_STDIO();  
+    public static final int H5FD_WINDOWS = H5FD_WINDOWS();
+    public static final int H5FD_LOG_LOC_READ = H5FD_LOG_LOC_READ();
+    public static final int H5FD_LOG_LOC_WRITE = H5FD_LOG_LOC_WRITE();
+    public static final int H5FD_LOG_LOC_SEEK = H5FD_LOG_LOC_SEEK();
+    public static final int H5FD_LOG_LOC_IO = H5FD_LOG_LOC_IO();
+    public static final int H5FD_LOG_FILE_READ = H5FD_LOG_FILE_READ();
+    public static final int H5FD_LOG_FILE_WRITE = H5FD_LOG_FILE_WRITE();
+    public static final int H5FD_LOG_FILE_IO = H5FD_LOG_FILE_IO();
+    public static final int H5FD_LOG_FLAVOR = H5FD_LOG_FLAVOR();
+    public static final int H5FD_LOG_NUM_READ = H5FD_LOG_NUM_READ();
+    public static final int H5FD_LOG_NUM_WRITE = H5FD_LOG_NUM_WRITE();
+    public static final int H5FD_LOG_NUM_SEEK = H5FD_LOG_NUM_SEEK();
+    public static final int H5FD_LOG_NUM_TRUNCATE = H5FD_LOG_NUM_TRUNCATE();
+    public static final int H5FD_LOG_NUM_IO = H5FD_LOG_NUM_IO();
+    public static final int H5FD_LOG_TIME_OPEN = H5FD_LOG_TIME_OPEN();
+    public static final int H5FD_LOG_TIME_STAT = H5FD_LOG_TIME_STAT();
+    public static final int H5FD_LOG_TIME_READ = H5FD_LOG_TIME_READ();
+    public static final int H5FD_LOG_TIME_WRITE = H5FD_LOG_TIME_WRITE();
+    public static final int H5FD_LOG_TIME_SEEK = H5FD_LOG_TIME_SEEK();
+    public static final int H5FD_LOG_TIME_CLOSE = H5FD_LOG_TIME_CLOSE();
+    public static final int H5FD_LOG_TIME_IO = H5FD_LOG_TIME_IO();
+    public static final int H5FD_LOG_ALLOC = H5FD_LOG_ALLOC();
+    public static final int H5FD_LOG_ALL = H5FD_LOG_ALL();
+    public static final int H5FD_MEM_NOLIST = H5FD_MEM_NOLIST();
+    public static final int H5FD_MEM_DEFAULT = H5FD_MEM_DEFAULT();
+    public static final int H5FD_MEM_SUPER = H5FD_MEM_SUPER();
+    public static final int H5FD_MEM_BTREE = H5FD_MEM_BTREE();
+    public static final int H5FD_MEM_DRAW = H5FD_MEM_DRAW();
+    public static final int H5FD_MEM_GHEAP = H5FD_MEM_GHEAP();
+    public static final int H5FD_MEM_LHEAP = H5FD_MEM_LHEAP();
+    public static final int H5FD_MEM_OHDR = H5FD_MEM_OHDR();
+    public static final int H5FD_MEM_NTYPES = H5FD_MEM_NTYPES();
+    public static final long H5FD_DEFAULT_HADDR_SIZE = H5FD_DEFAULT_HADDR_SIZE();
+
+    public static final int H5G_DATASET = H5G_DATASET();
+    public static final int H5G_GROUP = H5G_GROUP();
+    public static final int H5G_LINK = H5G_LINK();
+    public static final int H5G_UDLINK = H5G_UDLINK();
+    public static final int H5G_LINK_ERROR = H5G_LINK_ERROR();
+    public static final int H5G_LINK_HARD = H5G_LINK_HARD();
+    public static final int H5G_LINK_SOFT = H5G_LINK_SOFT();
+    public static final int H5G_NLIBTYPES = H5G_NLIBTYPES();
+    public static final int H5G_NTYPES = H5G_NTYPES();
+    public static final int H5G_NUSERTYPES = H5G_NUSERTYPES();
+    public static final int H5G_RESERVED_5 = H5G_RESERVED_5();
+    public static final int H5G_RESERVED_6 = H5G_RESERVED_6();
+    public static final int H5G_RESERVED_7 = H5G_RESERVED_7();
+    public static final int H5G_SAME_LOC = H5G_SAME_LOC();
+    public static final int H5G_STORAGE_TYPE_UNKNOWN = H5G_STORAGE_TYPE_UNKNOWN();
+    public static final int H5G_STORAGE_TYPE_SYMBOL_TABLE = H5G_STORAGE_TYPE_SYMBOL_TABLE();
+    public static final int H5G_STORAGE_TYPE_COMPACT = H5G_STORAGE_TYPE_COMPACT();
+    public static final int H5G_STORAGE_TYPE_DENSE = H5G_STORAGE_TYPE_DENSE();
+    public static final int H5G_TYPE = H5G_TYPE();
+    public static final int H5G_UNKNOWN = H5G_UNKNOWN();
+    public static final int H5I_ATTR = H5I_ATTR();
+    public static final int H5I_BADID = H5I_BADID();
+    public static final int H5I_DATASET = H5I_DATASET();
+    public static final int H5I_DATASPACE = H5I_DATASPACE();
+    public static final int H5I_DATATYPE = H5I_DATATYPE();
+    public static final int H5I_FILE = H5I_FILE();
+    public static final int H5I_GENPROP_CLS = H5I_GENPROP_CLS();
+    public static final int H5I_GENPROP_LST = H5I_GENPROP_LST();
+    public static final int H5I_GROUP = H5I_GROUP();
+    public static final int H5I_INVALID_HID = H5I_INVALID_HID();
+    public static final int H5I_REFERENCE = H5I_REFERENCE();
+    public static final int H5I_VFL = H5I_VFL();
+    public static final int H5L_TYPE_ERROR = H5L_TYPE_ERROR();
+    public static final int H5L_TYPE_HARD = H5L_TYPE_HARD();
+    public static final int H5L_TYPE_SOFT = H5L_TYPE_SOFT();
+    public static final int H5L_TYPE_EXTERNAL = H5L_TYPE_EXTERNAL();
+    public static final int H5L_TYPE_MAX = H5L_TYPE_MAX(); 
+    public static final int H5O_COPY_SHALLOW_HIERARCHY_FLAG = H5O_COPY_SHALLOW_HIERARCHY_FLAG();
+    public static final int H5O_COPY_EXPAND_SOFT_LINK_FLAG =  H5O_COPY_EXPAND_SOFT_LINK_FLAG();
+    public static final int H5O_COPY_EXPAND_EXT_LINK_FLAG = H5O_COPY_EXPAND_EXT_LINK_FLAG();
+    public static final int H5O_COPY_EXPAND_REFERENCE_FLAG = H5O_COPY_EXPAND_REFERENCE_FLAG();
+    public static final int H5O_COPY_WITHOUT_ATTR_FLAG = H5O_COPY_WITHOUT_ATTR_FLAG();
+    public static final int H5O_COPY_PRESERVE_NULL_FLAG = H5O_COPY_PRESERVE_NULL_FLAG();
+    public static final int H5O_SHMESG_NONE_FLAG = H5O_SHMESG_NONE_FLAG();
+    public static final int H5O_SHMESG_SDSPACE_FLAG = H5O_SHMESG_SDSPACE_FLAG();
+    public static final int H5O_SHMESG_DTYPE_FLAG = H5O_SHMESG_DTYPE_FLAG();
+    public static final int H5O_SHMESG_FILL_FLAG = H5O_SHMESG_FILL_FLAG();
+    public static final int H5O_SHMESG_PLINE_FLAG = H5O_SHMESG_PLINE_FLAG();
+    public static final int H5O_SHMESG_ATTR_FLAG = H5O_SHMESG_ATTR_FLAG();
+    public static final int H5O_SHMESG_ALL_FLAG = H5O_SHMESG_ALL_FLAG();
+    public static final int H5O_TYPE_UNKNOWN = H5O_TYPE_UNKNOWN();
+    public static final int H5O_TYPE_GROUP = H5O_TYPE_GROUP();
+    public static final int H5O_TYPE_DATASET = H5O_TYPE_DATASET();
+    public static final int H5O_TYPE_NAMED_DATATYPE = H5O_TYPE_NAMED_DATATYPE();
+    public static final int H5O_TYPE_NTYPES = H5O_TYPE_NTYPES();
+    
+    public static final int H5P_ROOT = H5P_ROOT();
+    public static final int H5P_OBJECT_CREATE = H5P_OBJECT_CREATE();
+    public static final int H5P_FILE_CREATE = H5P_FILE_CREATE();
+    public static final int H5P_FILE_ACCESS = H5P_FILE_ACCESS();
+    public static final int H5P_DATASET_CREATE = H5P_DATASET_CREATE();
+    public static final int H5P_DATASET_ACCESS = H5P_DATASET_ACCESS();
+    public static final int H5P_DATASET_XFER = H5P_DATASET_XFER();
+    public static final int H5P_FILE_MOUNT = H5P_FILE_MOUNT();
+    public static final int H5P_GROUP_CREATE = H5P_GROUP_CREATE();
+    public static final int H5P_GROUP_ACCESS = H5P_GROUP_ACCESS();
+    public static final int H5P_DATATYPE_CREATE = H5P_DATATYPE_CREATE();
+    public static final int H5P_DATATYPE_ACCESS = H5P_DATATYPE_ACCESS();
+    public static final int H5P_STRING_CREATE = H5P_STRING_CREATE();
+    public static final int H5P_ATTRIBUTE_CREATE = H5P_ATTRIBUTE_CREATE();
+    public static final int H5P_OBJECT_COPY = H5P_OBJECT_COPY();
+    public static final int H5P_LINK_CREATE = H5P_LINK_CREATE();
+    public static final int H5P_LINK_ACCESS = H5P_LINK_ACCESS();
+    public static final int H5P_FILE_CREATE_DEFAULT = H5P_FILE_CREATE_DEFAULT();
+    public static final int H5P_FILE_ACCESS_DEFAULT = H5P_FILE_ACCESS_DEFAULT();
+    public static final int H5P_DATASET_CREATE_DEFAULT = H5P_DATASET_CREATE_DEFAULT();
+    public static final int H5P_DATASET_ACCESS_DEFAULT = H5P_DATASET_ACCESS_DEFAULT();
+    public static final int H5P_DATASET_XFER_DEFAULT = H5P_DATASET_XFER_DEFAULT();
+    public static final int H5P_FILE_MOUNT_DEFAULT = H5P_FILE_MOUNT_DEFAULT();
+    public static final int H5P_GROUP_CREATE_DEFAULT = H5P_GROUP_CREATE_DEFAULT();
+    public static final int H5P_GROUP_ACCESS_DEFAULT = H5P_GROUP_ACCESS_DEFAULT();
+    public static final int H5P_DATATYPE_CREATE_DEFAULT = H5P_DATATYPE_CREATE_DEFAULT();
+    public static final int H5P_DATATYPE_ACCESS_DEFAULT = H5P_DATATYPE_ACCESS_DEFAULT();
+    public static final int H5P_ATTRIBUTE_CREATE_DEFAULT = H5P_ATTRIBUTE_CREATE_DEFAULT();
+    public static final int H5P_OBJECT_COPY_DEFAULT = H5P_OBJECT_COPY_DEFAULT();
+    public static final int H5P_LINK_CREATE_DEFAULT = H5P_LINK_CREATE_DEFAULT();
+    public static final int H5P_LINK_ACCESS_DEFAULT = H5P_LINK_ACCESS_DEFAULT();
+    public static final int H5P_CRT_ORDER_TRACKED = H5P_CRT_ORDER_TRACKED();
+    public static final int H5P_CRT_ORDER_INDEXED = H5P_CRT_ORDER_INDEXED();
+    public static final int H5P_DEFAULT = H5P_DEFAULT();
+    public static final int H5P_NO_CLASS = H5P_NO_CLASS();
+
+    public static final int H5R_BADTYPE = H5R_BADTYPE();
+    public static final int H5R_DATASET_REGION = H5R_DATASET_REGION();
+    public static final int H5R_MAXTYPE = H5R_MAXTYPE();
+    public static final int H5R_OBJ_REF_BUF_SIZE = H5R_OBJ_REF_BUF_SIZE();
+    public static final int H5R_OBJECT = H5R_OBJECT();
+    public static final int H5S_ALL = H5S_ALL();
+    public static final int H5S_MAX_RANK = H5S_MAX_RANK();
+    public static final int H5S_NO_CLASS = H5S_NO_CLASS();
+    public static final int H5S_NULL = H5S_NULL();
+    public static final int H5S_SCALAR = H5S_SCALAR();
+    public static final int H5S_SEL_ALL = H5S_SEL_ALL();
+    public static final int H5S_SEL_ERROR = H5S_SEL_ERROR();
+    public static final int H5S_SEL_HYPERSLABS = H5S_SEL_HYPERSLABS();
+    public static final int H5S_SEL_N = H5S_SEL_N();
+    public static final int H5S_SEL_NONE = H5S_SEL_NONE();
+    public static final int H5S_SEL_POINTS = H5S_SEL_POINTS();
+    public static final int H5S_SELECT_AND = H5S_SELECT_AND();
+    public static final int H5S_SELECT_APPEND = H5S_SELECT_APPEND();
+    public static final int H5S_SELECT_INVALID = H5S_SELECT_INVALID();
+    public static final int H5S_SELECT_NOOP = H5S_SELECT_NOOP();
+    public static final int H5S_SELECT_NOTA = H5S_SELECT_NOTA();
+    public static final int H5S_SELECT_NOTB = H5S_SELECT_NOTB();
+    public static final int H5S_SELECT_OR = H5S_SELECT_OR();
+    public static final int H5S_SELECT_PREPEND = H5S_SELECT_PREPEND();
+    public static final int H5S_SELECT_SET = H5S_SELECT_SET();
+    public static final int H5S_SELECT_XOR = H5S_SELECT_XOR();
+    public static final int H5S_SIMPLE = H5S_SIMPLE();
+    public static final int H5S_UNLIMITED = H5S_UNLIMITED();
+    public static final int H5T_ALPHA_B16 = H5T_ALPHA_B16();
+    public static final int H5T_ALPHA_B32 = H5T_ALPHA_B32();
+    public static final int H5T_ALPHA_B64 = H5T_ALPHA_B64();
+    public static final int H5T_ALPHA_B8 = H5T_ALPHA_B8();
+    public static final int H5T_ALPHA_F32 = H5T_ALPHA_F32();
+    public static final int H5T_ALPHA_F64 = H5T_ALPHA_F64();
+    public static final int H5T_ALPHA_I16 = H5T_ALPHA_I16();
+    public static final int H5T_ALPHA_I32 = H5T_ALPHA_I32();
+    public static final int H5T_ALPHA_I64 = H5T_ALPHA_I64();
+    public static final int H5T_ALPHA_I8 = H5T_ALPHA_I8();
+    public static final int H5T_ALPHA_U16 = H5T_ALPHA_U16();
+    public static final int H5T_ALPHA_U32 = H5T_ALPHA_U32();
+    public static final int H5T_ALPHA_U64 = H5T_ALPHA_U64();
+    public static final int H5T_ALPHA_U8 = H5T_ALPHA_U8();
+    public static final int H5T_ARRAY = H5T_ARRAY();
+    public static final int H5T_BITFIELD = H5T_BITFIELD();
+    public static final int H5T_BKG_NO = H5T_BKG_NO();
+    public static final int H5T_BKG_YES = H5T_BKG_YES();
+    public static final int H5T_C_S1 = H5T_C_S1();
+    public static final int H5T_COMPOUND = H5T_COMPOUND();
+    public static final int H5T_CONV_CONV = H5T_CONV_CONV();
+    public static final int H5T_CONV_FREE = H5T_CONV_FREE();
+    public static final int H5T_CONV_INIT = H5T_CONV_INIT();
+    public static final int H5T_CSET_ERROR = H5T_CSET_ERROR();
+    public static final int H5T_CSET_ASCII = H5T_CSET_ASCII();
+    public static final int H5T_CSET_UTF8 = H5T_CSET_UTF8();
+    public static final int H5T_CSET_RESERVED_10 = H5T_CSET_RESERVED_10();
+    public static final int H5T_CSET_RESERVED_11 = H5T_CSET_RESERVED_11();
+    public static final int H5T_CSET_RESERVED_12 = H5T_CSET_RESERVED_12();
+    public static final int H5T_CSET_RESERVED_13 = H5T_CSET_RESERVED_13();
+    public static final int H5T_CSET_RESERVED_14 = H5T_CSET_RESERVED_14();
+    public static final int H5T_CSET_RESERVED_15 = H5T_CSET_RESERVED_15();
+    public static final int H5T_CSET_RESERVED_2 = H5T_CSET_RESERVED_2();
+    public static final int H5T_CSET_RESERVED_3 = H5T_CSET_RESERVED_3();
+    public static final int H5T_CSET_RESERVED_4 = H5T_CSET_RESERVED_4();
+    public static final int H5T_CSET_RESERVED_5 = H5T_CSET_RESERVED_5();
+    public static final int H5T_CSET_RESERVED_6 = H5T_CSET_RESERVED_6();
+    public static final int H5T_CSET_RESERVED_7 = H5T_CSET_RESERVED_7();
+    public static final int H5T_CSET_RESERVED_8 = H5T_CSET_RESERVED_8();
+    public static final int H5T_CSET_RESERVED_9 = H5T_CSET_RESERVED_9();
+    public static final int H5T_DIR_ASCEND = H5T_DIR_ASCEND();
+    public static final int H5T_DIR_DEFAULT = H5T_DIR_DEFAULT();
+    public static final int H5T_DIR_DESCEND = H5T_DIR_DESCEND();
+    public static final int H5T_ENUM = H5T_ENUM();
+    public static final int H5T_FLOAT = H5T_FLOAT();
+    public static final int H5T_FORTRAN_S1 = H5T_FORTRAN_S1();
+    public static final int H5T_IEEE_F32BE = H5T_IEEE_F32BE();
+    public static final int H5T_IEEE_F32LE = H5T_IEEE_F32LE();
+    public static final int H5T_IEEE_F64BE = H5T_IEEE_F64BE();
+    public static final int H5T_IEEE_F64LE = H5T_IEEE_F64LE();
+    public static final int H5T_INTEGER = H5T_INTEGER();
+    public static final int H5T_INTEL_B16 = H5T_INTEL_B16();
+    public static final int H5T_INTEL_B32 = H5T_INTEL_B32();
+    public static final int H5T_INTEL_B64 = H5T_INTEL_B64();
+    public static final int H5T_INTEL_B8 = H5T_INTEL_B8();
+    public static final int H5T_INTEL_F32 = H5T_INTEL_F32();
+    public static final int H5T_INTEL_F64 = H5T_INTEL_F64();
+    public static final int H5T_INTEL_I16 = H5T_INTEL_I16();
+    public static final int H5T_INTEL_I32 = H5T_INTEL_I32();
+    public static final int H5T_INTEL_I64 = H5T_INTEL_I64();
+    public static final int H5T_INTEL_I8 = H5T_INTEL_I8();
+    public static final int H5T_INTEL_U16 = H5T_INTEL_U16();
+    public static final int H5T_INTEL_U32 = H5T_INTEL_U32();
+    public static final int H5T_INTEL_U64 = H5T_INTEL_U64();
+    public static final int H5T_INTEL_U8 = H5T_INTEL_U8();
+    public static final int H5T_MIPS_B16 = H5T_MIPS_B16();
+    public static final int H5T_MIPS_B32 = H5T_MIPS_B32();
+    public static final int H5T_MIPS_B64 = H5T_MIPS_B64();
+    public static final int H5T_MIPS_B8 = H5T_MIPS_B8();
+    public static final int H5T_MIPS_F32 = H5T_MIPS_F32();
+    public static final int H5T_MIPS_F64 = H5T_MIPS_F64();
+    public static final int H5T_MIPS_I16 = H5T_MIPS_I16();
+    public static final int H5T_MIPS_I32 = H5T_MIPS_I32();
+    public static final int H5T_MIPS_I64 = H5T_MIPS_I64();
+    public static final int H5T_MIPS_I8 = H5T_MIPS_I8();
+    public static final int H5T_MIPS_U16 = H5T_MIPS_U16();
+    public static final int H5T_MIPS_U32 = H5T_MIPS_U32();
+    public static final int H5T_MIPS_U64 = H5T_MIPS_U64();
+    public static final int H5T_MIPS_U8 = H5T_MIPS_U8();
+    public static final int H5T_NATIVE_B16 = H5T_NATIVE_B16();
+    public static final int H5T_NATIVE_B32 = H5T_NATIVE_B32();
+    public static final int H5T_NATIVE_B64 = H5T_NATIVE_B64();
+    public static final int H5T_NATIVE_B8 = H5T_NATIVE_B8();
+    public static final int H5T_NATIVE_CHAR = H5T_NATIVE_CHAR();
+    public static final int H5T_NATIVE_DOUBLE = H5T_NATIVE_DOUBLE();
+    public static final int H5T_NATIVE_FLOAT = H5T_NATIVE_FLOAT();
+    public static final int H5T_NATIVE_HADDR = H5T_NATIVE_HADDR();
+    public static final int H5T_NATIVE_HBOOL = H5T_NATIVE_HBOOL();
+    public static final int H5T_NATIVE_HERR = H5T_NATIVE_HERR();
+    public static final int H5T_NATIVE_HSIZE = H5T_NATIVE_HSIZE();
+    public static final int H5T_NATIVE_HSSIZE = H5T_NATIVE_HSSIZE();
+    public static final int H5T_NATIVE_INT = H5T_NATIVE_INT();
+    public static final int H5T_NATIVE_INT_FAST16 = H5T_NATIVE_INT_FAST16();
+    public static final int H5T_NATIVE_INT_FAST32 = H5T_NATIVE_INT_FAST32();
+    public static final int H5T_NATIVE_INT_FAST64 = H5T_NATIVE_INT_FAST64();
+    public static final int H5T_NATIVE_INT_FAST8 = H5T_NATIVE_INT_FAST8();
+    public static final int H5T_NATIVE_INT_LEAST16 = H5T_NATIVE_INT_LEAST16();
+    public static final int H5T_NATIVE_INT_LEAST32 = H5T_NATIVE_INT_LEAST32();
+    public static final int H5T_NATIVE_INT_LEAST64 = H5T_NATIVE_INT_LEAST64();
+    public static final int H5T_NATIVE_INT_LEAST8 = H5T_NATIVE_INT_LEAST8();
+    public static final int H5T_NATIVE_INT16 = H5T_NATIVE_INT16();
+    public static final int H5T_NATIVE_INT32 = H5T_NATIVE_INT32();
+    public static final int H5T_NATIVE_INT64 = H5T_NATIVE_INT64();
+    public static final int H5T_NATIVE_INT8 = H5T_NATIVE_INT8();
+    public static final int H5T_NATIVE_LDOUBLE = H5T_NATIVE_LDOUBLE();
+    public static final int H5T_NATIVE_LLONG = H5T_NATIVE_LLONG();
+    public static final int H5T_NATIVE_LONG = H5T_NATIVE_LONG();
+    public static final int H5T_NATIVE_OPAQUE = H5T_NATIVE_OPAQUE();
+    public static final int H5T_NATIVE_SCHAR = H5T_NATIVE_SCHAR();
+    public static final int H5T_NATIVE_SHORT = H5T_NATIVE_SHORT();
+    public static final int H5T_NATIVE_UCHAR = H5T_NATIVE_UCHAR();
+    public static final int H5T_NATIVE_UINT = H5T_NATIVE_UINT();
+    public static final int H5T_NATIVE_UINT_FAST16 = H5T_NATIVE_UINT_FAST16();
+    public static final int H5T_NATIVE_UINT_FAST32 = H5T_NATIVE_UINT_FAST32();
+    public static final int H5T_NATIVE_UINT_FAST64 = H5T_NATIVE_UINT_FAST64();
+    public static final int H5T_NATIVE_UINT_FAST8 = H5T_NATIVE_UINT_FAST8();
+    public static final int H5T_NATIVE_UINT_LEAST16 = H5T_NATIVE_UINT_LEAST16();
+    public static final int H5T_NATIVE_UINT_LEAST32 = H5T_NATIVE_UINT_LEAST32();
+    public static final int H5T_NATIVE_UINT_LEAST64 = H5T_NATIVE_UINT_LEAST64();
+    public static final int H5T_NATIVE_UINT_LEAST8 = H5T_NATIVE_UINT_LEAST8();
+    public static final int H5T_NATIVE_UINT16 = H5T_NATIVE_UINT16();
+    public static final int H5T_NATIVE_UINT32 = H5T_NATIVE_UINT32();
+    public static final int H5T_NATIVE_UINT64 = H5T_NATIVE_UINT64();
+    public static final int H5T_NATIVE_UINT8 = H5T_NATIVE_UINT8();
+    public static final int H5T_NATIVE_ULLONG = H5T_NATIVE_ULLONG();
+    public static final int H5T_NATIVE_ULONG = H5T_NATIVE_ULONG();
+    public static final int H5T_NATIVE_USHORT = H5T_NATIVE_USHORT();
+    public static final int H5T_NCLASSES = H5T_NCLASSES();
+    public static final int H5T_NO_CLASS = H5T_NO_CLASS();
+    public static final int H5T_NORM_ERROR = H5T_NORM_ERROR();
+    public static final int H5T_NORM_IMPLIED = H5T_NORM_IMPLIED();
+    public static final int H5T_NORM_MSBSET = H5T_NORM_MSBSET();
+    public static final int H5T_NORM_NONE = H5T_NORM_NONE();
+    public static final int H5T_NPAD = H5T_NPAD();
+    public static final int H5T_NSGN = H5T_NSGN();
+    public static final int H5T_OPAQUE = H5T_OPAQUE();
+    public static final int H5T_OPAQUE_TAG_MAX = H5T_OPAQUE_TAG_MAX(); /* 1.6.5 */
+    public static final int H5T_ORDER_BE = H5T_ORDER_BE();
+    public static final int H5T_ORDER_ERROR = H5T_ORDER_ERROR();
+    public static final int H5T_ORDER_LE = H5T_ORDER_LE();
+    public static final int H5T_ORDER_NONE = H5T_ORDER_NONE();
+    public static final int H5T_ORDER_VAX = H5T_ORDER_VAX();
+    public static final int H5T_PAD_BACKGROUND = H5T_PAD_BACKGROUND();
+    public static final int H5T_PAD_ERROR = H5T_PAD_ERROR();
+    public static final int H5T_PAD_ONE = H5T_PAD_ONE();
+    public static final int H5T_PAD_ZERO = H5T_PAD_ZERO();
+    public static final int H5T_PERS_DONTCARE = H5T_PERS_DONTCARE();
+    public static final int H5T_PERS_HARD = H5T_PERS_HARD();
+    public static final int H5T_PERS_SOFT = H5T_PERS_SOFT();
+    public static final int H5T_REFERENCE = H5T_REFERENCE();
+    public static final int H5T_SGN_2 = H5T_SGN_2();
+    public static final int H5T_SGN_ERROR = H5T_SGN_ERROR();
+    public static final int H5T_SGN_NONE = H5T_SGN_NONE();
+    public static final int H5T_STD_B16BE = H5T_STD_B16BE();
+    public static final int H5T_STD_B16LE = H5T_STD_B16LE();
+    public static final int H5T_STD_B32BE = H5T_STD_B32BE();
+    public static final int H5T_STD_B32LE = H5T_STD_B32LE();
+    public static final int H5T_STD_B64BE = H5T_STD_B64BE();
+    public static final int H5T_STD_B64LE = H5T_STD_B64LE();
+    public static final int H5T_STD_B8BE = H5T_STD_B8BE();
+    public static final int H5T_STD_B8LE = H5T_STD_B8LE();
+    public static final int H5T_STD_I16BE = H5T_STD_I16BE();
+    public static final int H5T_STD_I16LE = H5T_STD_I16LE();
+    public static final int H5T_STD_I32BE = H5T_STD_I32BE();
+    public static final int H5T_STD_I32LE = H5T_STD_I32LE();
+    public static final int H5T_STD_I64BE = H5T_STD_I64BE();
+    public static final int H5T_STD_I64LE = H5T_STD_I64LE();
+    public static final int H5T_STD_I8BE = H5T_STD_I8BE();
+    public static final int H5T_STD_I8LE = H5T_STD_I8LE();
+    public static final int H5T_STD_REF_DSETREG = H5T_STD_REF_DSETREG();
+    public static final int H5T_STD_REF_OBJ = H5T_STD_REF_OBJ();
+    public static final int H5T_STD_U16BE = H5T_STD_U16BE();
+    public static final int H5T_STD_U16LE = H5T_STD_U16LE();
+    public static final int H5T_STD_U32BE = H5T_STD_U32BE();
+    public static final int H5T_STD_U32LE = H5T_STD_U32LE();
+    public static final int H5T_STD_U64BE = H5T_STD_U64BE();
+    public static final int H5T_STD_U64LE = H5T_STD_U64LE();
+    public static final int H5T_STD_U8BE = H5T_STD_U8BE();
+    public static final int H5T_STD_U8LE = H5T_STD_U8LE();
+    public static final int H5T_STR_ERROR = H5T_STR_ERROR();
+    public static final int H5T_STR_NULLPAD = H5T_STR_NULLPAD();
+    public static final int H5T_STR_NULLTERM = H5T_STR_NULLTERM();
+    public static final int H5T_STR_RESERVED_10 = H5T_STR_RESERVED_10();
+    public static final int H5T_STR_RESERVED_11 = H5T_STR_RESERVED_11();
+    public static final int H5T_STR_RESERVED_12 = H5T_STR_RESERVED_12();
+    public static final int H5T_STR_RESERVED_13 = H5T_STR_RESERVED_13();
+    public static final int H5T_STR_RESERVED_14 = H5T_STR_RESERVED_14();
+    public static final int H5T_STR_RESERVED_15 = H5T_STR_RESERVED_15();
+    public static final int H5T_STR_RESERVED_3 = H5T_STR_RESERVED_3();
+    public static final int H5T_STR_RESERVED_4 = H5T_STR_RESERVED_4();
+    public static final int H5T_STR_RESERVED_5 = H5T_STR_RESERVED_5();
+    public static final int H5T_STR_RESERVED_6 = H5T_STR_RESERVED_6();
+    public static final int H5T_STR_RESERVED_7 = H5T_STR_RESERVED_7();
+    public static final int H5T_STR_RESERVED_8 = H5T_STR_RESERVED_8();
+    public static final int H5T_STR_RESERVED_9 = H5T_STR_RESERVED_9();
+    public static final int H5T_STR_SPACEPAD = H5T_STR_SPACEPAD();
+    public static final int H5T_STRING = H5T_STRING();
+    public static final int H5T_TIME = H5T_TIME();
+    public static final int H5T_UNIX_D32BE = H5T_UNIX_D32BE();
+    public static final int H5T_UNIX_D32LE = H5T_UNIX_D32LE();
+    public static final int H5T_UNIX_D64BE = H5T_UNIX_D64BE();
+    public static final int H5T_UNIX_D64LE = H5T_UNIX_D64LE();
+    public static final int H5T_VARIABLE = H5T_VARIABLE();
+    public static final int H5T_VLEN = H5T_VLEN();
+    public static final int H5Z_CB_CONT = H5Z_CB_CONT();
+    public static final int H5Z_CB_ERROR = H5Z_CB_ERROR();
+    public static final int H5Z_CB_FAIL = H5Z_CB_FAIL();
+    public static final int H5Z_CB_NO = H5Z_CB_NO();
+    public static final int H5Z_DISABLE_EDC = H5Z_DISABLE_EDC();
+    public static final int H5Z_ENABLE_EDC = H5Z_ENABLE_EDC();
+    public static final int H5Z_ERROR_EDC = H5Z_ERROR_EDC();
+    public static final int H5Z_FILTER_DEFLATE = H5Z_FILTER_DEFLATE();
+    public static final int H5Z_FILTER_ERROR = H5Z_FILTER_ERROR();
+    public static final int H5Z_FILTER_FLETCHER32 = H5Z_FILTER_FLETCHER32();
+    public static final int H5Z_FILTER_MAX = H5Z_FILTER_MAX();
+    public static final int H5Z_FILTER_NBIT = H5Z_FILTER_NBIT();
+    public static final int H5Z_FILTER_NONE = H5Z_FILTER_NONE();
+    public static final int H5Z_FILTER_RESERVED = H5Z_FILTER_RESERVED();
+    public static final int H5Z_FILTER_SCALEOFFSET = H5Z_FILTER_SCALEOFFSET();
+    public static final int H5Z_FILTER_SHUFFLE = H5Z_FILTER_SHUFFLE();
+    public static final int H5Z_FILTER_SZIP = H5Z_FILTER_SZIP();
+    public static final int H5Z_FLAG_DEFMASK = H5Z_FLAG_DEFMASK();
+    public static final int H5Z_FLAG_INVMASK = H5Z_FLAG_INVMASK();
+    public static final int H5Z_FLAG_MANDATORY = H5Z_FLAG_MANDATORY();
+    public static final int H5Z_FLAG_OPTIONAL = H5Z_FLAG_OPTIONAL();
+    public static final int H5Z_FLAG_REVERSE = H5Z_FLAG_REVERSE();
+    public static final int H5Z_FLAG_SKIP_EDC = H5Z_FLAG_SKIP_EDC();
+    public static final int H5Z_MAX_NFILTERS = H5Z_MAX_NFILTERS();
+    public static final int H5Z_NO_EDC = H5Z_NO_EDC();
+    public static final int H5Z_FILTER_CONFIG_ENCODE_ENABLED = H5Z_FILTER_CONFIG_ENCODE_ENABLED();
+    public static final int H5Z_FILTER_CONFIG_DECODE_ENABLED = H5Z_FILTER_CONFIG_DECODE_ENABLED();
+    public static final int H5Z_SO_INT_MINBITS_DEFAULT = H5Z_SO_INT_MINBITS_DEFAULT();
+    public static final int H5Z_SO_FLOAT_DSCALE = H5Z_SO_FLOAT_DSCALE();
+    public static final int H5Z_SO_FLOAT_ESCALE = H5Z_SO_FLOAT_ESCALE();
+    public static final int H5Z_SO_INT = H5Z_SO_INT();
+    public static final int H5Z_SHUFFLE_USER_NPARMS = H5Z_SHUFFLE_USER_NPARMS();
+    public static final int H5Z_SHUFFLE_TOTAL_NPARMS = H5Z_SHUFFLE_TOTAL_NPARMS();
+    public static final int H5Z_SZIP_USER_NPARMS = H5Z_SZIP_USER_NPARMS();
+    public static final int H5Z_SZIP_TOTAL_NPARMS = H5Z_SZIP_TOTAL_NPARMS();
+    public static final int H5Z_SZIP_PARM_MASK = H5Z_SZIP_PARM_MASK();
+    public static final int H5Z_SZIP_PARM_PPB = H5Z_SZIP_PARM_PPB();
+    public static final int H5Z_SZIP_PARM_BPP = H5Z_SZIP_PARM_BPP();
+    public static final int H5Z_SZIP_PARM_PPS = H5Z_SZIP_PARM_PPS();
+    public static final int H5Z_NBIT_USER_NPARMS = H5Z_NBIT_USER_NPARMS();
+    public static final int H5Z_SCALEOFFSET_USER_NPARMS = H5Z_SCALEOFFSET_USER_NPARMS();
+    public static final int H5Z_FILTER_ALL = H5Z_FILTER_ALL();
+
+    // /////////////////////////////////////////////////////////////////////////
+    // List of private native methods used to get constant values from C      //
+    // DO NOT EDIT THE LIST UNLESS YOU KNOW WHAT YOU ARE DOING!!!             //
+    // /////////////////////////////////////////////////////////////////////////
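+    //
+    // Each public constant above is initialized when this class is loaded by
+    // calling the matching private native method declared below; the native
+    // side returns the value of the corresponding C macro or enum member, so
+    // the Java constants always agree with the HDF5 headers that the native
+    // library was compiled against.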
+
+    private static native final long H5_QUARTER_HADDR_MAX();
+
+    private static native final int H5_SZIP_MAX_PIXELS_PER_BLOCK();
+
+    private static native final int H5_SZIP_NN_OPTION_MASK();
+
+    private static native final int H5_SZIP_EC_OPTION_MASK();
+
+    private static native final int H5_SZIP_ALLOW_K13_OPTION_MASK();
+
+    private static native final int H5_SZIP_CHIP_OPTION_MASK();
+
+    private static native final int H5_INDEX_UNKNOWN();
+
+    private static native final int H5_INDEX_NAME();
+
+    private static native final int H5_INDEX_CRT_ORDER();
+
+    private static native final int H5_INDEX_N();
+
+    private static native final int H5_ITER_UNKNOWN();
+
+    private static native final int H5_ITER_INC();
+
+    private static native final int H5_ITER_DEC();
+
+    private static native final int H5_ITER_NATIVE();
+
+    private static native final int H5_ITER_N();
+
+    private static native final int H5AC_CURR_CACHE_CONFIG_VERSION();
+
+    private static native final int H5AC_MAX_TRACE_FILE_NAME_LEN();
+
+    private static native final int H5AC_METADATA_WRITE_STRATEGY_PROCESS_ZERO_ONLY();
+
+    private static native final int H5AC_METADATA_WRITE_STRATEGY_DISTRIBUTED();
+
+    private static native final int H5C_incr_off();
+
+    private static native final int H5C_incr_threshold();
+
+    private static native final int H5C_flash_incr_off();
+
+    private static native final int H5C_flash_incr_add_space();
+
+    private static native final int H5C_decr_off();
+
+    private static native final int H5C_decr_threshold();
+
+    private static native final int H5C_decr_age_out();
+
+    private static native final int H5C_decr_age_out_with_threshold();
+
+    private static native final int H5D_CHUNK_BTREE();
+
+    private static native final int H5D_ALLOC_TIME_DEFAULT();
+
+    private static native final int H5D_ALLOC_TIME_EARLY();
+
+    private static native final int H5D_ALLOC_TIME_ERROR();
+
+    private static native final int H5D_ALLOC_TIME_INCR();
+
+    private static native final int H5D_ALLOC_TIME_LATE();
+
+    private static native final int H5D_FILL_TIME_ERROR();
+
+    private static native final int H5D_FILL_TIME_ALLOC();
+
+    private static native final int H5D_FILL_TIME_NEVER();
+
+    private static native final int H5D_FILL_TIME_IFSET();
+
+    private static native final int H5D_FILL_VALUE_DEFAULT();
+
+    private static native final int H5D_FILL_VALUE_ERROR();
+
+    private static native final int H5D_FILL_VALUE_UNDEFINED();
+
+    private static native final int H5D_FILL_VALUE_USER_DEFINED();
+
+    private static native final int H5D_LAYOUT_ERROR();
+
+    private static native final int H5D_CHUNKED();
+
+    private static native final int H5D_COMPACT();
+
+    private static native final int H5D_CONTIGUOUS();
+
+    private static native final int H5D_NLAYOUTS();
+
+    private static native final int H5D_SPACE_STATUS_ALLOCATED();
+
+    private static native final int H5D_SPACE_STATUS_ERROR();
+
+    private static native final int H5D_SPACE_STATUS_NOT_ALLOCATED();
+
+    private static native final int H5D_SPACE_STATUS_PART_ALLOCATED();
+
+    private static native final int H5E_ALIGNMENT();
+
+    private static native final int H5E_ALREADYEXISTS();
+
+    private static native final int H5E_ALREADYINIT();
+
+    private static native final int H5E_ARGS();
+
+    private static native final int H5E_ATOM();
+
+    private static native final int H5E_ATTR();
+
+    private static native final int H5E_BADATOM();
+
+    private static native final int H5E_BADFILE();
+
+    private static native final int H5E_BADGROUP();
+
+    private static native final int H5E_BADMESG();
+
+    private static native final int H5E_BADRANGE();
+
+    private static native final int H5E_BADSELECT();
+
+    private static native final int H5E_BADSIZE();
+
+    private static native final int H5E_BADTYPE();
+
+    private static native final int H5E_BADVALUE();
+
+    private static native final int H5E_BTREE();
+
+    private static native final int H5E_CACHE();
+
+    private static native final int H5E_CALLBACK();
+
+    private static native final int H5E_CANAPPLY();
+
+    // private static native final int H5E_CANTALLOC();
+    private static native final int H5E_CANTCLIP();
+
+    private static native final int H5E_CANTCLOSEFILE();
+
+    private static native final int H5E_CANTCONVERT();
+
+    private static native final int H5E_CANTCOPY();
+
+    private static native final int H5E_CANTCOUNT();
+
+    private static native final int H5E_CANTCREATE();
+
+    private static native final int H5E_CANTDEC();
+
+    private static native final int H5E_CANTDECODE();
+
+    private static native final int H5E_CANTDELETE();
+
+    private static native final int H5E_CANTENCODE();
+
+    private static native final int H5E_CANTFLUSH();
+
+    private static native final int H5E_CANTFREE();
+
+    private static native final int H5E_CANTGET();
+
+    private static native final int H5E_CANTINC();
+
+    private static native final int H5E_CANTINIT();
+
+    private static native final int H5E_CANTINSERT();
+
+    private static native final int H5E_CANTLIST();
+
+    private static native final int H5E_CANTLOAD();
+
+    private static native final int H5E_CANTLOCK();
+
+    private static native final int H5E_CANTNEXT();
+
+    private static native final int H5E_CANTOPENFILE();
+
+    private static native final int H5E_CANTOPENOBJ();
+
+    // private static native final int H5E_CANTRECV();
+    private static native final int H5E_CANTREGISTER();
+
+    private static native final int H5E_CANTRELEASE();
+
+    private static native final int H5E_CANTSELECT();
+
+    private static native final int H5E_CANTSET();
+
+    private static native final int H5E_CANTSPLIT();
+
+    private static native final int H5E_CANTUNLOCK();
+
+    private static native final int H5E_CLOSEERROR();
+
+    private static native final int H5E_COMPLEN();
+
+    private static native final int H5E_DATASET();
+
+    private static native final int H5E_DATASPACE();
+
+    private static native final int H5E_DATATYPE();
+
+    private static native final int H5E_DEFAULT();
+
+    private static native final int H5E_DUPCLASS();
+
+    private static native final int H5E_EFL();
+
+    private static native final int H5E_EXISTS();
+
+    private static native final int H5E_FCNTL();
+
+    private static native final int H5E_FILE();
+
+    private static native final int H5E_FILEEXISTS();
+
+    private static native final int H5E_FILEOPEN();
+
+    private static native final int H5E_FUNC();
+
+    private static native final int H5E_HEAP();
+
+    private static native final int H5E_INTERNAL();
+
+    private static native final int H5E_IO();
+
+    private static native final int H5E_LINK();
+
+    private static native final int H5E_LINKCOUNT();
+
+    private static native final int H5E_MAJOR();
+
+    private static native final int H5E_MINOR();
+
+    private static native final int H5E_MOUNT();
+
+    private static native final int H5E_MPI();
+
+    private static native final int H5E_MPIERRSTR();
+
+    private static native final int H5E_NOFILTER();
+
+    private static native final int H5E_NOIDS();
+
+    private static native final int H5E_NONE_MAJOR();
+
+    private static native final int H5E_NONE_MINOR();
+
+    private static native final int H5E_NOSPACE();
+
+    private static native final int H5E_NOTCACHED();
+
+    private static native final int H5E_NOTFOUND();
+
+    private static native final int H5E_NOTHDF5();
+
+    private static native final int H5E_OHDR();
+
+    private static native final int H5E_OVERFLOW();
+
+    private static native final int H5E_PLINE();
+
+    private static native final int H5E_PLIST();
+
+    private static native final int H5E_PROTECT();
+
+    private static native final int H5E_READERROR();
+
+    private static native final int H5E_REFERENCE();
+
+    private static native final int H5E_RESOURCE();
+
+    private static native final int H5E_RS();
+
+    private static native final int H5E_SEEKERROR();
+
+    private static native final int H5E_SETLOCAL();
+
+    private static native final int H5E_STORAGE();
+
+    private static native final int H5E_SYM();
+
+    private static native final int H5E_TRUNCATED();
+
+    private static native final int H5E_TST();
+
+    private static native final int H5E_UNINITIALIZED();
+
+    private static native final int H5E_UNSUPPORTED();
+
+    private static native final int H5E_VERSION();
+
+    private static native final int H5E_VFL();
+
+    private static native final int H5E_WALK_DOWNWARD();
+
+    private static native final int H5E_WALK_UPWARD();
+
+    private static native final int H5E_WRITEERROR();
+
+    private static native final int H5F_ACC_CREAT();
+
+    private static native final int H5F_ACC_DEBUG();
+
+    private static native final int H5F_ACC_EXCL();
+
+    private static native final int H5F_ACC_RDONLY();
+
+    private static native final int H5F_ACC_RDWR();
+
+    private static native final int H5F_ACC_TRUNC();
+
+    private static native final int H5F_ACC_DEFAULT();
+
+    private static native final int H5F_CLOSE_DEFAULT();
+
+    private static native final int H5F_CLOSE_SEMI();
+
+    private static native final int H5F_CLOSE_STRONG();
+
+    private static native final int H5F_CLOSE_WEAK();
+    
+    private static native final int H5F_LIBVER_EARLIEST();
+    
+    private static native final int H5F_LIBVER_LATEST();
+    
+    private static native final int H5F_OBJ_ALL();
+
+    private static native final int H5F_OBJ_ATTR();
+
+    private static native final int H5F_OBJ_DATASET();
+
+    private static native final int H5F_OBJ_DATATYPE();
+
+    private static native final int H5F_OBJ_FILE();
+
+    private static native final int H5F_OBJ_GROUP();
+
+    private static native final int H5F_OBJ_LOCAL(); /* 1.6.5 */
+
+    private static native final int H5F_SCOPE_DOWN();
+
+    private static native final int H5F_SCOPE_GLOBAL();
+
+    private static native final int H5F_SCOPE_LOCAL();
+
+    private static native final int H5F_UNLIMITED();
+
+    private static native final int H5FD_CORE();
+
+    private static native final int H5FD_DIRECT();
+
+    private static native final int H5FD_FAMILY();
+
+    private static native final int H5FD_LOG();
+
+    private static native final int H5FD_MPIO();
+
+    private static native final int H5FD_MULTI();
+
+    private static native final int H5FD_SEC2();
+
+    private static native final int H5FD_STDIO();
+   
+    private static native final int H5FD_WINDOWS();
+    
+    private static native final int H5FD_LOG_LOC_READ();
+    
+    private static native final int H5FD_LOG_LOC_WRITE();
+    
+    private static native final int H5FD_LOG_LOC_SEEK();
+    
+    private static native final int H5FD_LOG_LOC_IO();
+    
+    private static native final int H5FD_LOG_FILE_READ();
+    
+    private static native final int H5FD_LOG_FILE_WRITE();
+    
+    private static native final int H5FD_LOG_FILE_IO();
+    
+    private static native final int H5FD_LOG_FLAVOR();
+    
+    private static native final int H5FD_LOG_NUM_READ();
+    
+    private static native final int H5FD_LOG_NUM_WRITE();
+    
+    private static native final int H5FD_LOG_NUM_SEEK();
+    
+    private static native final int H5FD_LOG_NUM_TRUNCATE();
+    
+    private static native final int H5FD_LOG_NUM_IO();
+    
+    private static native final int H5FD_LOG_TIME_OPEN();
+    
+    private static native final int H5FD_LOG_TIME_STAT();
+    
+    private static native final int H5FD_LOG_TIME_READ();
+    
+    private static native final int H5FD_LOG_TIME_WRITE();
+    
+    private static native final int H5FD_LOG_TIME_SEEK();
+    
+    private static native final int H5FD_LOG_TIME_CLOSE();
+    
+    private static native final int H5FD_LOG_TIME_IO();
+    
+    private static native final int H5FD_LOG_ALLOC();
+    
+    private static native final int H5FD_LOG_ALL();
+    
+    private static native final int H5FD_MEM_NOLIST();
+    
+    private static native final int H5FD_MEM_DEFAULT();
+    
+    private static native final int H5FD_MEM_SUPER();
+    
+    private static native final int H5FD_MEM_BTREE();
+    
+    private static native final int H5FD_MEM_DRAW();
+    
+    private static native final int H5FD_MEM_GHEAP();
+    
+    private static native final int H5FD_MEM_LHEAP();
+    
+    private static native final int H5FD_MEM_OHDR();
+    
+    private static native final int H5FD_MEM_NTYPES();
+    
+    private static native final long H5FD_DEFAULT_HADDR_SIZE();
+
+    private static native final int H5G_DATASET();
+
+    private static native final int H5G_GROUP();
+
+    private static native final int H5G_LINK();
+
+    private static native final int H5G_UDLINK();
+
+    private static native final int H5G_LINK_ERROR();
+
+    private static native final int H5G_LINK_HARD();
+
+    private static native final int H5G_LINK_SOFT();
+
+    private static native final int H5G_NLIBTYPES();
+
+    private static native final int H5G_NTYPES();
+
+    private static native final int H5G_NUSERTYPES();
+
+    private static native final int H5G_RESERVED_5();
+
+    private static native final int H5G_RESERVED_6();
+
+    private static native final int H5G_RESERVED_7();
+
+    private static native final int H5G_SAME_LOC();
+    
+    private static native final int H5G_STORAGE_TYPE_UNKNOWN();
+    
+    private static native final int H5G_STORAGE_TYPE_SYMBOL_TABLE();
+    
+    private static native final int H5G_STORAGE_TYPE_COMPACT();
+    
+    private static native final int H5G_STORAGE_TYPE_DENSE();
+
+    private static native final int H5G_TYPE();
+
+    private static native final int H5G_UNKNOWN();
+
+    private static native final int H5I_ATTR();
+
+    private static native final int H5I_BADID();
+
+    private static native final int H5I_DATASET();
+
+    private static native final int H5I_DATASPACE();
+
+    private static native final int H5I_DATATYPE();
+
+    private static native final int H5I_FILE();
+
+    private static native final int H5I_GENPROP_CLS();
+
+    private static native final int H5I_GENPROP_LST();
+
+    private static native final int H5I_GROUP();
+
+    private static native final int H5I_INVALID_HID();
+
+    private static native final int H5I_REFERENCE();
+
+    private static native final int H5I_VFL();
+
+    private static native final int H5L_TYPE_ERROR();
+    
+    private static native final int H5L_TYPE_HARD();
+    
+    private static native final int H5L_TYPE_SOFT();
+    
+    private static native final int H5L_TYPE_EXTERNAL();
+    
+    private static native final int H5L_TYPE_MAX();
+    
+    private static native final int H5O_COPY_SHALLOW_HIERARCHY_FLAG();
+    
+    private static native final int H5O_COPY_EXPAND_SOFT_LINK_FLAG();
+    
+    private static native final int H5O_COPY_EXPAND_EXT_LINK_FLAG();
+    
+    private static native final int H5O_COPY_EXPAND_REFERENCE_FLAG();
+    
+    private static native final int H5O_COPY_WITHOUT_ATTR_FLAG();
+    
+    private static native final int H5O_COPY_PRESERVE_NULL_FLAG();
+    
+    private static native final int H5O_SHMESG_NONE_FLAG();
+    
+    private static native final int H5O_SHMESG_SDSPACE_FLAG();
+    
+    private static native final int H5O_SHMESG_DTYPE_FLAG();
+    
+    private static native final int H5O_SHMESG_FILL_FLAG();
+    
+    private static native final int H5O_SHMESG_PLINE_FLAG();
+    
+    private static native final int H5O_SHMESG_ATTR_FLAG();
+    
+    private static native final int H5O_SHMESG_ALL_FLAG();
+    
+    private static native final int H5O_TYPE_UNKNOWN();
+    
+    private static native final int H5O_TYPE_GROUP();
+    
+    private static native final int H5O_TYPE_DATASET();
+    
+    private static native final int H5O_TYPE_NAMED_DATATYPE();
+    
+    private static native final int H5O_TYPE_NTYPES();
+    
+    private static native final int H5P_ROOT();
+    
+    private static native final int H5P_OBJECT_CREATE();
+    
+    private static native final int H5P_FILE_CREATE();
+    
+    private static native final int H5P_FILE_ACCESS();
+    
+    private static native final int H5P_DATASET_CREATE();
+        
+    private static native final int H5P_DATASET_ACCESS();
+        
+    private static native final int H5P_DATASET_XFER();
+        
+    private static native final int H5P_FILE_MOUNT();
+        
+    private static native final int H5P_GROUP_CREATE();
+        
+    private static native final int H5P_GROUP_ACCESS();
+        
+    private static native final int H5P_DATATYPE_CREATE();
+        
+    private static native final int H5P_DATATYPE_ACCESS();
+        
+    private static native final int H5P_STRING_CREATE();
+        
+    private static native final int H5P_ATTRIBUTE_CREATE();
+        
+    private static native final int H5P_OBJECT_COPY();
+        
+    private static native final int H5P_LINK_CREATE();
+        
+    private static native final int H5P_LINK_ACCESS();
+        
+    private static native final int H5P_FILE_CREATE_DEFAULT();
+        
+    private static native final int H5P_FILE_ACCESS_DEFAULT();
+        
+    private static native final int H5P_DATASET_CREATE_DEFAULT();
+        
+    private static native final int H5P_DATASET_ACCESS_DEFAULT();
+        
+    private static native final int H5P_DATASET_XFER_DEFAULT();
+        
+    private static native final int H5P_FILE_MOUNT_DEFAULT();
+        
+    private static native final int H5P_GROUP_CREATE_DEFAULT();
+        
+    private static native final int H5P_GROUP_ACCESS_DEFAULT();
+        
+    private static native final int H5P_DATATYPE_CREATE_DEFAULT();
+        
+    private static native final int H5P_DATATYPE_ACCESS_DEFAULT();
+        
+    private static native final int H5P_ATTRIBUTE_CREATE_DEFAULT();
+        
+    private static native final int H5P_OBJECT_COPY_DEFAULT();
+        
+    private static native final int H5P_LINK_CREATE_DEFAULT();
+        
+    private static native final int H5P_LINK_ACCESS_DEFAULT();
+    
+    private static native final int H5P_CRT_ORDER_TRACKED();
+    
+    private static native final int H5P_CRT_ORDER_INDEXED();
+
+    private static native final int H5P_DEFAULT();
+
+    private static native final int H5P_NO_CLASS();
+
+    private static native final int H5R_BADTYPE();
+
+    private static native final int H5R_DATASET_REGION();
+
+    private static native final int H5R_MAXTYPE();
+
+    private static native final int H5R_OBJ_REF_BUF_SIZE();
+
+    private static native final int H5R_OBJECT();
+
+    private static native final int H5S_ALL();
+
+    private static native final int H5S_MAX_RANK();
+
+    private static native final int H5S_NO_CLASS();
+
+    private static native final int H5S_NULL();
+
+    private static native final int H5S_SCALAR();
+
+    private static native final int H5S_SEL_ALL();
+
+    private static native final int H5S_SEL_ERROR();
+
+    private static native final int H5S_SEL_HYPERSLABS();
+
+    private static native final int H5S_SEL_N();
+
+    private static native final int H5S_SEL_NONE();
+
+    private static native final int H5S_SEL_POINTS();
+
+    private static native final int H5S_SELECT_AND();
+
+    private static native final int H5S_SELECT_APPEND();
+
+    private static native final int H5S_SELECT_INVALID();
+
+    private static native final int H5S_SELECT_NOOP();
+
+    private static native final int H5S_SELECT_NOTA();
+
+    private static native final int H5S_SELECT_NOTB();
+
+    private static native final int H5S_SELECT_OR();
+
+    private static native final int H5S_SELECT_PREPEND();
+
+    private static native final int H5S_SELECT_SET();
+
+    private static native final int H5S_SELECT_XOR();
+
+    private static native final int H5S_SIMPLE();
+
+    private static native final int H5S_UNLIMITED();
+
+    private static native final int H5T_ALPHA_B16();
+
+    private static native final int H5T_ALPHA_B32();
+
+    private static native final int H5T_ALPHA_B64();
+
+    private static native final int H5T_ALPHA_B8();
+
+    private static native final int H5T_ALPHA_F32();
+
+    private static native final int H5T_ALPHA_F64();
+
+    private static native final int H5T_ALPHA_I16();
+
+    private static native final int H5T_ALPHA_I32();
+
+    private static native final int H5T_ALPHA_I64();
+
+    private static native final int H5T_ALPHA_I8();
+
+    private static native final int H5T_ALPHA_U16();
+
+    private static native final int H5T_ALPHA_U32();
+
+    private static native final int H5T_ALPHA_U64();
+
+    private static native final int H5T_ALPHA_U8();
+
+    private static native final int H5T_ARRAY();
+
+    private static native final int H5T_BITFIELD();
+
+    private static native final int H5T_BKG_NO();
+
+    private static native final int H5T_BKG_YES();
+
+    private static native final int H5T_C_S1();
+
+    private static native final int H5T_COMPOUND();
+
+    private static native final int H5T_CONV_CONV();
+
+    private static native final int H5T_CONV_FREE();
+
+    private static native final int H5T_CONV_INIT();
+
+    private static native final int H5T_CSET_ERROR();
+
+    private static native final int H5T_CSET_ASCII();
+
+    private static native final int H5T_CSET_UTF8();
+
+    private static native final int H5T_CSET_RESERVED_10();
+
+    private static native final int H5T_CSET_RESERVED_11();
+
+    private static native final int H5T_CSET_RESERVED_12();
+
+    private static native final int H5T_CSET_RESERVED_13();
+
+    private static native final int H5T_CSET_RESERVED_14();
+
+    private static native final int H5T_CSET_RESERVED_15();
+
+    private static native final int H5T_CSET_RESERVED_2();
+
+    private static native final int H5T_CSET_RESERVED_3();
+
+    private static native final int H5T_CSET_RESERVED_4();
+
+    private static native final int H5T_CSET_RESERVED_5();
+
+    private static native final int H5T_CSET_RESERVED_6();
+
+    private static native final int H5T_CSET_RESERVED_7();
+
+    private static native final int H5T_CSET_RESERVED_8();
+
+    private static native final int H5T_CSET_RESERVED_9();
+
+    private static native final int H5T_DIR_ASCEND();
+
+    private static native final int H5T_DIR_DEFAULT();
+
+    private static native final int H5T_DIR_DESCEND();
+
+    private static native final int H5T_ENUM();
+
+    private static native final int H5T_FLOAT();
+
+    private static native final int H5T_FORTRAN_S1();
+
+    private static native final int H5T_IEEE_F32BE();
+
+    private static native final int H5T_IEEE_F32LE();
+
+    private static native final int H5T_IEEE_F64BE();
+
+    private static native final int H5T_IEEE_F64LE();
+
+    private static native final int H5T_INTEGER();
+
+    private static native final int H5T_INTEL_B16();
+
+    private static native final int H5T_INTEL_B32();
+
+    private static native final int H5T_INTEL_B64();
+
+    private static native final int H5T_INTEL_B8();
+
+    private static native final int H5T_INTEL_F32();
+
+    private static native final int H5T_INTEL_F64();
+
+    private static native final int H5T_INTEL_I16();
+
+    private static native final int H5T_INTEL_I32();
+
+    private static native final int H5T_INTEL_I64();
+
+    private static native final int H5T_INTEL_I8();
+
+    private static native final int H5T_INTEL_U16();
+
+    private static native final int H5T_INTEL_U32();
+
+    private static native final int H5T_INTEL_U64();
+
+    private static native final int H5T_INTEL_U8();
+
+    private static native final int H5T_MIPS_B16();
+
+    private static native final int H5T_MIPS_B32();
+
+    private static native final int H5T_MIPS_B64();
+
+    private static native final int H5T_MIPS_B8();
+
+    private static native final int H5T_MIPS_F32();
+
+    private static native final int H5T_MIPS_F64();
+
+    private static native final int H5T_MIPS_I16();
+
+    private static native final int H5T_MIPS_I32();
+
+    private static native final int H5T_MIPS_I64();
+
+    private static native final int H5T_MIPS_I8();
+
+    private static native final int H5T_MIPS_U16();
+
+    private static native final int H5T_MIPS_U32();
+
+    private static native final int H5T_MIPS_U64();
+
+    private static native final int H5T_MIPS_U8();
+
+    private static native final int H5T_NATIVE_B16();
+
+    private static native final int H5T_NATIVE_B32();
+
+    private static native final int H5T_NATIVE_B64();
+
+    private static native final int H5T_NATIVE_B8();
+
+    private static native final int H5T_NATIVE_CHAR();
+
+    private static native final int H5T_NATIVE_DOUBLE();
+
+    private static native final int H5T_NATIVE_FLOAT();
+
+    private static native final int H5T_NATIVE_HADDR();
+
+    private static native final int H5T_NATIVE_HBOOL();
+
+    private static native final int H5T_NATIVE_HERR();
+
+    private static native final int H5T_NATIVE_HSIZE();
+
+    private static native final int H5T_NATIVE_HSSIZE();
+
+    private static native final int H5T_NATIVE_INT();
+
+    private static native final int H5T_NATIVE_INT_FAST16();
+
+    private static native final int H5T_NATIVE_INT_FAST32();
+
+    private static native final int H5T_NATIVE_INT_FAST64();
+
+    private static native final int H5T_NATIVE_INT_FAST8();
+
+    private static native final int H5T_NATIVE_INT_LEAST16();
+
+    private static native final int H5T_NATIVE_INT_LEAST32();
+
+    private static native final int H5T_NATIVE_INT_LEAST64();
+
+    private static native final int H5T_NATIVE_INT_LEAST8();
+
+    private static native final int H5T_NATIVE_INT16();
+
+    private static native final int H5T_NATIVE_INT32();
+
+    private static native final int H5T_NATIVE_INT64();
+
+    private static native final int H5T_NATIVE_INT8();
+
+    private static native final int H5T_NATIVE_LDOUBLE();
+
+    private static native final int H5T_NATIVE_LLONG();
+
+    private static native final int H5T_NATIVE_LONG();
+
+    private static native final int H5T_NATIVE_OPAQUE();
+
+    private static native final int H5T_NATIVE_SCHAR();
+
+    private static native final int H5T_NATIVE_SHORT();
+
+    private static native final int H5T_NATIVE_UCHAR();
+
+    private static native final int H5T_NATIVE_UINT();
+
+    private static native final int H5T_NATIVE_UINT_FAST16();
+
+    private static native final int H5T_NATIVE_UINT_FAST32();
+
+    private static native final int H5T_NATIVE_UINT_FAST64();
+
+    private static native final int H5T_NATIVE_UINT_FAST8();
+
+    private static native final int H5T_NATIVE_UINT_LEAST16();
+
+    private static native final int H5T_NATIVE_UINT_LEAST32();
+
+    private static native final int H5T_NATIVE_UINT_LEAST64();
+
+    private static native final int H5T_NATIVE_UINT_LEAST8();
+
+    private static native final int H5T_NATIVE_UINT16();
+
+    private static native final int H5T_NATIVE_UINT32();
+
+    private static native final int H5T_NATIVE_UINT64();
+
+    private static native final int H5T_NATIVE_UINT8();
+
+    private static native final int H5T_NATIVE_ULLONG();
+
+    private static native final int H5T_NATIVE_ULONG();
+
+    private static native final int H5T_NATIVE_USHORT();
+
+    private static native final int H5T_NCLASSES();
+
+    private static native final int H5T_NO_CLASS();
+
+    private static native final int H5T_NORM_ERROR();
+
+    private static native final int H5T_NORM_IMPLIED();
+
+    private static native final int H5T_NORM_MSBSET();
+
+    private static native final int H5T_NORM_NONE();
+
+    private static native final int H5T_NPAD();
+
+    private static native final int H5T_NSGN();
+
+    private static native final int H5T_OPAQUE();
+
+    private static native final int H5T_OPAQUE_TAG_MAX();
+
+    private static native final int H5T_ORDER_BE();
+
+    private static native final int H5T_ORDER_ERROR();
+
+    private static native final int H5T_ORDER_LE();
+
+    private static native final int H5T_ORDER_NONE();
+
+    private static native final int H5T_ORDER_VAX();
+
+    private static native final int H5T_PAD_BACKGROUND();
+
+    private static native final int H5T_PAD_ERROR();
+
+    private static native final int H5T_PAD_ONE();
+
+    private static native final int H5T_PAD_ZERO();
+
+    private static native final int H5T_PERS_DONTCARE();
+
+    private static native final int H5T_PERS_HARD();
+
+    private static native final int H5T_PERS_SOFT();
+
+    private static native final int H5T_REFERENCE();
+
+    private static native final int H5T_SGN_2();
+
+    private static native final int H5T_SGN_ERROR();
+
+    private static native final int H5T_SGN_NONE();
+
+    private static native final int H5T_STD_B16BE();
+
+    private static native final int H5T_STD_B16LE();
+
+    private static native final int H5T_STD_B32BE();
+
+    private static native final int H5T_STD_B32LE();
+
+    private static native final int H5T_STD_B64BE();
+
+    private static native final int H5T_STD_B64LE();
+
+    private static native final int H5T_STD_B8BE();
+
+    private static native final int H5T_STD_B8LE();
+
+    private static native final int H5T_STD_I16BE();
+
+    private static native final int H5T_STD_I16LE();
+
+    private static native final int H5T_STD_I32BE();
+
+    private static native final int H5T_STD_I32LE();
+
+    private static native final int H5T_STD_I64BE();
+
+    private static native final int H5T_STD_I64LE();
+
+    private static native final int H5T_STD_I8BE();
+
+    private static native final int H5T_STD_I8LE();
+
+    private static native final int H5T_STD_REF_DSETREG();
+
+    private static native final int H5T_STD_REF_OBJ();
+
+    private static native final int H5T_STD_U16BE();
+
+    private static native final int H5T_STD_U16LE();
+
+    private static native final int H5T_STD_U32BE();
+
+    private static native final int H5T_STD_U32LE();
+
+    private static native final int H5T_STD_U64BE();
+
+    private static native final int H5T_STD_U64LE();
+
+    private static native final int H5T_STD_U8BE();
+
+    private static native final int H5T_STD_U8LE();
+
+    private static native final int H5T_STR_ERROR();
+
+    private static native final int H5T_STR_NULLPAD();
+
+    private static native final int H5T_STR_NULLTERM();
+
+    private static native final int H5T_STR_RESERVED_10();
+
+    private static native final int H5T_STR_RESERVED_11();
+
+    private static native final int H5T_STR_RESERVED_12();
+
+    private static native final int H5T_STR_RESERVED_13();
+
+    private static native final int H5T_STR_RESERVED_14();
+
+    private static native final int H5T_STR_RESERVED_15();
+
+    private static native final int H5T_STR_RESERVED_3();
+
+    private static native final int H5T_STR_RESERVED_4();
+
+    private static native final int H5T_STR_RESERVED_5();
+
+    private static native final int H5T_STR_RESERVED_6();
+
+    private static native final int H5T_STR_RESERVED_7();
+
+    private static native final int H5T_STR_RESERVED_8();
+
+    private static native final int H5T_STR_RESERVED_9();
+
+    private static native final int H5T_STR_SPACEPAD();
+
+    private static native final int H5T_STRING();
+
+    private static native final int H5T_TIME();
+
+    private static native final int H5T_UNIX_D32BE();
+
+    private static native final int H5T_UNIX_D32LE();
+
+    private static native final int H5T_UNIX_D64BE();
+
+    private static native final int H5T_UNIX_D64LE();
+
+    private static native final int H5T_VARIABLE();
+
+    private static native final int H5T_VLEN();
+
+    private static native final int H5Z_CB_CONT();
+
+    private static native final int H5Z_CB_ERROR();
+
+    private static native final int H5Z_CB_FAIL();
+
+    private static native final int H5Z_CB_NO();
+
+    private static native final int H5Z_DISABLE_EDC();
+
+    private static native final int H5Z_ENABLE_EDC();
+
+    private static native final int H5Z_ERROR_EDC();
+
+    private static native final int H5Z_FILTER_DEFLATE();
+
+    private static native final int H5Z_FILTER_ERROR();
+
+    private static native final int H5Z_FILTER_FLETCHER32();
+
+    private static native final int H5Z_FILTER_MAX();
+    
+    private static native final int H5Z_FILTER_NBIT();
+
+    private static native final int H5Z_FILTER_NONE();
+
+    private static native final int H5Z_FILTER_RESERVED();
+    
+    private static native final int H5Z_FILTER_SCALEOFFSET();
+
+    private static native final int H5Z_FILTER_SHUFFLE();
+
+    private static native final int H5Z_FILTER_SZIP();
+
+    private static native final int H5Z_FLAG_DEFMASK();
+
+    private static native final int H5Z_FLAG_INVMASK();
+
+    private static native final int H5Z_FLAG_MANDATORY();
+
+    private static native final int H5Z_FLAG_OPTIONAL();
+
+    private static native final int H5Z_FLAG_REVERSE();
+
+    private static native final int H5Z_FLAG_SKIP_EDC();
+
+    private static native final int H5Z_MAX_NFILTERS();
+
+    private static native final int H5Z_NO_EDC();
+
+    private static native final int H5Z_FILTER_CONFIG_ENCODE_ENABLED();
+
+    private static native final int H5Z_FILTER_CONFIG_DECODE_ENABLED();
+
+    private static native final int H5Z_SO_INT_MINBITS_DEFAULT();
+    
+    private static native final int H5Z_SO_FLOAT_DSCALE();
+    
+    private static native final int H5Z_SO_FLOAT_ESCALE();
+    
+    private static native final int H5Z_SO_INT();
+
+    private static native final int H5Z_SHUFFLE_USER_NPARMS();
+
+    private static native final int H5Z_SHUFFLE_TOTAL_NPARMS();
+
+    private static native final int H5Z_SZIP_USER_NPARMS();
+
+    private static native final int H5Z_SZIP_TOTAL_NPARMS();
+
+    private static native final int H5Z_SZIP_PARM_MASK();
+
+    private static native final int H5Z_SZIP_PARM_PPB();
+
+    private static native final int H5Z_SZIP_PARM_BPP();
+
+    private static native final int H5Z_SZIP_PARM_PPS();
+
+    private static native final int H5Z_NBIT_USER_NPARMS();
+
+    private static native final int H5Z_SCALEOFFSET_USER_NPARMS();
+
+    private static native final int H5Z_FILTER_ALL();
+
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/HDF5GroupInfo.java b/source/java/ncsa/hdf/hdf5lib/HDF5GroupInfo.java
new file mode 100644
index 0000000..620538a
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/HDF5GroupInfo.java
@@ -0,0 +1,170 @@
+/****************************************************************************
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib;
+
+/**
+ * <p>
+ * This class is a container for the information reported about an HDF5 Object
+ * from the H5Gget_obj_info() method.
+ * <p>
+ * The fileno and objno fields contain four values which uniquely identify an
+ * object among those HDF5 files which are open: if all four values are the same
+ * between two objects, then the two objects are the same (provided both files
+ * are still open). The nlink field is the number of hard links to the object or
+ * zero when information is being returned about a symbolic link (symbolic links
+ * do not have hard links but all other objects always have at least one). The
+ * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or
+ * H5G_LINK. The mtime field contains the modification time. If information is
+ * being returned about a symbolic link then linklen will be the length of the
+ * link value (the name of the pointed-to object with the null terminator);
+ * otherwise linklen will be zero. Other fields may be added to this structure
+ * in the future.
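+ * <p>
+ * A minimal sketch (with made-up identifier values) of how the identity
+ * fields are compared via {@link #equals(Object)}:
+ *
+ * <pre>
+ * HDF5GroupInfo a = new HDF5GroupInfo();
+ * HDF5GroupInfo b = new HDF5GroupInfo();
+ * a.setGroupInfo(new long[] { 1, 2 }, new long[] { 3, 4 }, 1, 0, 0L, 0);
+ * b.setGroupInfo(new long[] { 1, 2 }, new long[] { 3, 4 }, 1, 0, 0L, 0);
+ * boolean same = a.equals(b); // true: all four identity values match
+ * </pre>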
+ * <p>
+ * For details of the HDF5 libraries, see the HDF5 Documentation at: <a
+ * href="http://hdf.ncsa.uiuc.edu/HDF5/doc/"
+ * >http://hdf.ncsa.uiuc.edu/HDF5/doc/</a>
+ */
+
+public class HDF5GroupInfo {
+    long[] fileno;
+    long[] objno;
+    int nlink;
+    int type;
+    long mtime;
+    int linklen;
+
+    public HDF5GroupInfo() {
+        fileno = new long[2];
+        objno = new long[2];
+        nlink = -1;
+        type = -1;
+        mtime = 0;
+        linklen = 0;
+    }
+
+    /**
+     * Sets the HDF5 group information. Used by the JHI5.
+     * 
+     * @param fn
+     *            File id number
+     * @param on
+     *            Object id number
+     * @param nl
+     *            Number of links
+     * @param t
+     *            Type of the object
+     * @param mt
+     *            Modification time
+     * @param len
+     *            Length of link
+     **/
+    public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt,
+            int len) {
+        fileno = fn;
+        objno = on;
+        nlink = nl;
+        type = t;
+        mtime = mt;
+        linklen = len;
+    }
+
+    /** Resets all the group information to defaults. */
+    public void reset() {
+        fileno[0] = 0;
+        fileno[1] = 0;
+        objno[0] = 0;
+        objno[1] = 0;
+        nlink = -1;
+        type = -1;
+        mtime = 0;
+        linklen = 0;
+    }
+
+    /* accessors */
+    public long[] getFileno() {
+        return fileno;
+    }
+
+    public long[] getObjno() {
+        return objno;
+    }
+
+    public int getType() {
+        return type;
+    }
+
+    public int getNlink() {
+        return nlink;
+    }
+
+    public long getMtime() {
+        return mtime;
+    }
+
+    public int getLinklen() {
+        return linklen;
+    }
+
+    /**
+     * The fileno and objno fields contain four values which uniquely identify
+     * an object among those HDF5 files which are currently open.
+     */
+    @Override
+    public boolean equals(Object obj) {
+        if (!(obj instanceof HDF5GroupInfo)) {
+            return false;
+        }
+
+        HDF5GroupInfo target = (HDF5GroupInfo) obj;
+        if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1])
+                && (objno[0] == target.objno[0])
+                && (objno[1] == target.objno[1])) {
+            return true;
+        }
+        else {
+            return false;
+        }
+    }
+
+    /**
+     * Returns the object id.
+     */
+    public long getOID() {
+        return objno[0];
+    }
+
+    /**
+     * Converts this object to a String representation.
+     * 
+     * @return a string representation of this object
+     */
+    @Override
+    public String toString() {
+        String fileStr = "fileno=null";
+        String objStr = "objno=null";
+
+        if (fileno != null) {
+            fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1];
+        }
+
+        if (objno != null) {
+            objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1];
+        }
+
+        return getClass().getName() + "[" + fileStr + "," + objStr + ",type="
+                + type + ",nlink=" + nlink + ",mtime=" + mtime + ",linklen="
+                + linklen + "]";
+    }
+
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/HDFArray.java b/source/java/ncsa/hdf/hdf5lib/HDFArray.java
new file mode 100755
index 0000000..973b998
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/HDFArray.java
@@ -0,0 +1,1093 @@
+/****************************************************************************
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * This is a class for handling multidimensional arrays for HDF.
+ * <p>
+ * The purpose is to allow the storage and retrieval of arbitrary array types
+ * containing scientific data.
+ * <p>
+ * The methods support the conversion of an array to and from Java to a
+ * one-dimensional array of bytes suitable for I/O by the C library.
+ * <p>
+ * This class heavily uses the <a
+ * href="./ncsa.hdf.hdf5lib.HDFNativeData.html">HDFNativeData</a> class to
+ * convert between Java and C representations.
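+ * <p>
+ * A minimal round-trip sketch (illustrative values only):
+ *
+ * <pre>
+ * int[][] data = { { 1, 2, 3 }, { 4, 5, 6 } };
+ * HDFArray wrapper = new HDFArray(data);
+ * byte[] raw = wrapper.byteify(); // Java array to native-order bytes
+ * int[][] back = (int[][]) wrapper.arrayify(raw); // and back again
+ * </pre>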
+ */
+
+public class HDFArray {
+
+    private Object _theArray = null;
+    private ArrayDescriptor _desc = null;
+    private byte[] _barray = null;
+
+    // public HDFArray() {}
+
+    /**
+     * The input must be a Java Array (possibly multidimensional) of primitive
+     * numbers or sub-classes of Number.
+     * <P>
+     * The input is analysed to determine the number of dimensions and size of
+     * each dimension, as well as the type of the elements.
+     * <P>
+     * The description is saved in private variables, and used to convert data.
+     * 
+     * @exception ncsa.hdf.hdf5lib.exceptions.HDF5Exception
+     *                object is not an array.
+     */
+    public HDFArray(Object anArray) throws HDF5Exception {
+
+        if (anArray == null) {
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: array is null?: ");
+            throw (ex);
+        }
+        Class<?> tc = anArray.getClass();
+        if (tc.isArray() == false) {
+            /* exception: not an array */
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: not an array?: ");
+            throw (ex);
+        }
+        _theArray = anArray;
+        _desc = new ArrayDescriptor(_theArray);
+
+        /* extra error checking -- probably not needed */
+        if (_desc == null) {
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: internal error: array description failed?: ");
+            throw (ex);
+        }
+    }
+
+    /**
+     * Allocate a one-dimensional array of bytes sufficient to store the array.
+     * 
+     * @return A one-D array of bytes, filled with zeroes. The bytes are
+     *         sufficient to hold the data of the Array passed to the
+     *         constructor.
+     * @exception ncsa.hdf.hdf5lib.exceptions.HDF5JavaException
+     *                Allocation failed.
+     */
+
+    public byte[] emptyBytes() throws HDF5Exception {
+        byte[] b = null;
+
+        if ((ArrayDescriptor.dims == 1) && (ArrayDescriptor.NT == 'B')) {
+            b = (byte[]) _theArray;
+        }
+        else {
+            b = new byte[ArrayDescriptor.totalSize];
+        }
+        if (b == null) {
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: emptyBytes: allocation failed");
+            throw (ex);
+        }
+        return (b);
+    }
+
+    /**
+     * Given a Java array of numbers, convert it to a one-dimensional array of
+     * bytes in correct native order.
+     * 
+     * @return A one-D array of bytes, constructed from the Array passed to the
+     *         constructor.
+     * @exception ncsa.hdf.hdf5lib.exceptions.HDF5Exception
+     *                thrown for errors in HDF5
+     * @exception ncsa.hdf.hdf5lib.exceptions.HDF5JavaException
+     *                the object not an array or other internal error.
+     */
+    public byte[] byteify() throws HDF5Exception {
+
+        if (_barray != null) {
+            return _barray;
+        }
+
+        if (_theArray == null) {
+            /* exception: not an array */
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: byteify not an array?: ");
+            throw (ex);
+        }
+
+        if (ArrayDescriptor.dims == 1) {
+            /* special case */
+            if (ArrayDescriptor.NT == 'B') {
+                /* really special case! */
+                _barray = (byte[]) _theArray;
+                return _barray;
+            }
+            else {
+                try {
+                    _barray = new byte[ArrayDescriptor.totalSize];
+
+                    byte[] therow;
+                    if (ArrayDescriptor.NT == 'I') {
+                        therow = HDFNativeData.intToByte(0,
+                                ArrayDescriptor.dimlen[1], (int[]) _theArray);
+                    }
+                    else if (ArrayDescriptor.NT == 'S') {
+                        therow = HDFNativeData.shortToByte(0,
+                                ArrayDescriptor.dimlen[1], (short[]) _theArray);
+                    }
+                    else if (ArrayDescriptor.NT == 'F') {
+                        therow = HDFNativeData.floatToByte(0,
+                                ArrayDescriptor.dimlen[1], (float[]) _theArray);
+                    }
+                    else if (ArrayDescriptor.NT == 'J') {
+                        therow = HDFNativeData.longToByte(0,
+                                ArrayDescriptor.dimlen[1], (long[]) _theArray);
+                    }
+                    else if (ArrayDescriptor.NT == 'D') {
+                        therow = HDFNativeData
+                                .doubleToByte(0, ArrayDescriptor.dimlen[1],
+                                        (double[]) _theArray);
+                    }
+                    else if (ArrayDescriptor.NT == 'L') {
+                        if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+                            therow = ByteObjToByte((Byte[]) _theArray);
+                        }
+                        else if (ArrayDescriptor.className
+                                .equals("java.lang.Integer")) {
+                            therow = IntegerToByte((Integer[]) _theArray);
+                        }
+                        else if (ArrayDescriptor.className
+                                .equals("java.lang.Short")) {
+                            therow = ShortToByte((Short[]) _theArray);
+                        }
+                        else if (ArrayDescriptor.className
+                                .equals("java.lang.Float")) {
+                            therow = FloatObjToByte((Float[]) _theArray);
+                        }
+                        else if (ArrayDescriptor.className
+                                .equals("java.lang.Double")) {
+                            therow = DoubleObjToByte((Double[]) _theArray);
+                        }
+                        else if (ArrayDescriptor.className
+                                .equals("java.lang.Long")) {
+                            therow = LongObjToByte((Long[]) _theArray);
+                        }
+                        else {
+                            HDF5JavaException ex = new HDF5JavaException(
+                                    "HDFArray: unknown type of Object?");
+                            throw (ex);
+                        }
+                    }
+                    else {
+                        HDF5JavaException ex = new HDF5JavaException(
+                                "HDFArray: unknown type of data?");
+                        throw (ex);
+                    }
+                    System.arraycopy(therow, 0, _barray, 0,
+                            ArrayDescriptor.dimlen[1] * ArrayDescriptor.NTsize);
+                    return _barray;
+                }
+                catch (OutOfMemoryError err) {
+                    HDF5JavaException ex = new HDF5JavaException(
+                            "HDFArray: byteify array too big?");
+                    throw (ex);
+                }
+            }
+        }
+
+        try {
+            _barray = new byte[ArrayDescriptor.totalSize];
+        }
+        catch (OutOfMemoryError err) {
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: byteify array too big?");
+            throw (ex);
+        }
+
+        Object oo = _theArray;
+        int n = 0; /* the current byte */
+        int index = 0;
+        int i;
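+        /*
+         * Odometer-style walk over the flat byte offset n: bytetoindex[i]
+         * converts n into the index along dimension i, and sub-arrays already
+         * visited are cached in objs[]/currentindex[] so that reflection is
+         * only needed when an index actually changes.
+         */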
+        while (n < ArrayDescriptor.totalSize) {
+            oo = ArrayDescriptor.objs[0];
+            index = n / ArrayDescriptor.bytetoindex[0];
+            index %= ArrayDescriptor.dimlen[0];
+            for (i = 0; i < (ArrayDescriptor.dims); i++) {
+                index = n / ArrayDescriptor.bytetoindex[i];
+                index %= ArrayDescriptor.dimlen[i];
+
+                if (index == ArrayDescriptor.currentindex[i]) {
+                    /* then use cached copy */
+                    oo = ArrayDescriptor.objs[i];
+                }
+                else {
+                    /* check range of index */
+                    if (index > (ArrayDescriptor.dimlen[i] - 1)) {
+                        throw new java.lang.IndexOutOfBoundsException(
+                                "HDFArray: byteify index OOB?");
+                    }
+                    oo = java.lang.reflect.Array.get(oo, index);
+                    ArrayDescriptor.currentindex[i] = index;
+                    ArrayDescriptor.objs[i] = oo;
+                }
+            }
+
+            /* byte-ify */
+            byte arow[];
+            try {
+                if (ArrayDescriptor.NT == 'J') {
+                    arow = HDFNativeData
+                            .longToByte(
+                                    0,
+                                    ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                    (long[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                }
+                else if (ArrayDescriptor.NT == 'I') {
+                    arow = HDFNativeData
+                            .intToByte(
+                                    0,
+                                    ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                    (int[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                }
+                else if (ArrayDescriptor.NT == 'S') {
+                    arow = HDFNativeData
+                            .shortToByte(
+                                    0,
+                                    ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                    (short[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                }
+                else if (ArrayDescriptor.NT == 'B') {
+                    arow = (byte[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1];
+                }
+                else if (ArrayDescriptor.NT == 'F') {
+                    /* 32 bit float */
+                    arow = HDFNativeData
+                            .floatToByte(
+                                    0,
+                                    ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                    (float[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                }
+                else if (ArrayDescriptor.NT == 'D') {
+                    /* 64 bit float */
+                    arow = HDFNativeData
+                            .doubleToByte(
+                                    0,
+                                    ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                    (double[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                }
+                else if (ArrayDescriptor.NT == 'L') {
+                    if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+                        arow = ByteObjToByte((Byte[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Integer")) {
+                        arow = IntegerToByte((Integer[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Short")) {
+                        arow = ShortToByte((Short[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Float")) {
+                        arow = FloatObjToByte((Float[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Double")) {
+                        arow = DoubleObjToByte((Double[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                    }
+                    else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+                        arow = LongObjToByte((Long[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+                    }
+                    else {
+                        HDF5JavaException ex = new HDF5JavaException(
+                                "HDFArray: byteify Object type not implemented?");
+                        throw (ex);
+                    }
+                }
+                else {
+                    HDF5JavaException ex = new HDF5JavaException(
+                            "HDFArray: byteify unknown type not implemented?");
+                    throw (ex);
+                }
+                System.arraycopy(arow, 0, _barray, n,
+                        ArrayDescriptor.dimlen[ArrayDescriptor.dims]
+                                * ArrayDescriptor.NTsize);
+                n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+            }
+            catch (OutOfMemoryError err) {
+                HDF5JavaException ex = new HDF5JavaException(
+                        "HDFArray: byteify array too big?");
+                throw (ex);
+            }
+        }
+        /* assert: the whole array is completed--currentindex should == len - 1 */
+
+        /* error checks */
+
+        if (n < ArrayDescriptor.totalSize) {
+            throw new java.lang.InternalError(
+                    "HDFArray::byteify: Panic: did not convert all input data: n = "
+                            + n + " size = " + ArrayDescriptor.totalSize);
+        }
+        for (i = 0; i < ArrayDescriptor.dims; i++) {
+            if (ArrayDescriptor.currentindex[i] != ArrayDescriptor.dimlen[i] - 1) {
+                throw new java.lang.InternalError(
+                        "HDFArray::byteify: Panic: did not complete all data: currentindex["
+                                + i + "] = " + ArrayDescriptor.currentindex[i]
+                                + " (should be "
+                                + (ArrayDescriptor.dimlen[i] - 1) + "?)");
+            }
+        }
+        return _barray;
+    }
+
+    /**
+     * Given a one-dimensional array of bytes representing numbers, convert it
+     * to a java array of the shape and size passed to the constructor.
+     * 
+     * @param bytes
+     *            The bytes to construct the Array.
+     * @return An Array (possibly multidimensional) of primitive or number
+     *         objects.
+     * @exception ncsa.hdf.hdf5lib.exceptions.HDF5Exception
+     *                thrown for errors in HDF5
+     * @exception ncsa.hdf.hdf5lib.exceptions.HDF5JavaException
+     *                thrown if the object is not an array or for other
+     *                internal errors.
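+     * <p>
+     * Illustrative round trip (a sketch only; the <code>data</code> array and
+     * its shape are hypothetical, and <code>byteify()</code> is the companion
+     * conversion defined above in this class):
+     * 
+     * <pre>
+     * int[][] data = new int[4][8];
+     * HDFArray ha = new HDFArray(data);
+     * byte[] raw = ha.byteify(); // flatten to a native-order byte stream
+     * int[][] back = (int[][]) ha.arrayify(raw); // rebuild the original shape
+     * </pre>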
+     */
+    public Object arrayify(byte[] bytes) throws HDF5Exception {
+
+        if (_theArray == null) {
+            /* exception: not an array */
+            HDF5JavaException ex = new HDF5JavaException(
+                    "arrayify: not an array?: ");
+            throw (ex);
+        }
+
+        if (java.lang.reflect.Array.getLength(bytes) != ArrayDescriptor.totalSize) {
+            /* exception: array not right size */
+            HDF5JavaException ex = new HDF5JavaException(
+                    "arrayify: array is wrong size?: ");
+            throw (ex);
+        }
+        _barray = bytes; /* hope that the bytes are correct.... */
+        if (ArrayDescriptor.dims == 1) {
+            /* special case */
+            /* 2 data copies here! */
+            try {
+                if (ArrayDescriptor.NT == 'I') {
+                    int[] x = HDFNativeData.byteToInt(_barray);
+                    System.arraycopy(x, 0, _theArray, 0,
+                            ArrayDescriptor.dimlen[1]);
+                    return _theArray;
+                }
+                else if (ArrayDescriptor.NT == 'S') {
+                    short[] x = HDFNativeData.byteToShort(_barray);
+                    System.arraycopy(x, 0, _theArray, 0,
+                            ArrayDescriptor.dimlen[1]);
+                    return _theArray;
+                }
+                else if (ArrayDescriptor.NT == 'F') {
+                    float x[] = HDFNativeData.byteToFloat(_barray);
+                    System.arraycopy(x, 0, _theArray, 0,
+                            ArrayDescriptor.dimlen[1]);
+                    return _theArray;
+                }
+                else if (ArrayDescriptor.NT == 'J') {
+                    long x[] = HDFNativeData.byteToLong(_barray);
+                    System.arraycopy(x, 0, _theArray, 0,
+                            ArrayDescriptor.dimlen[1]);
+                    return _theArray;
+                }
+                else if (ArrayDescriptor.NT == 'D') {
+                    double x[] = HDFNativeData.byteToDouble(_barray);
+                    System.arraycopy(x, 0, _theArray, 0,
+                            ArrayDescriptor.dimlen[1]);
+                    return _theArray;
+                }
+                else if (ArrayDescriptor.NT == 'B') {
+                    System.arraycopy(_barray, 0, _theArray, 0,
+                            ArrayDescriptor.dimlen[1]);
+                    return _theArray;
+                }
+                else if (ArrayDescriptor.NT == 'L') {
+                    if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+                        Byte I[] = ByteToByteObj(_barray);
+                        System.arraycopy(I, 0, _theArray, 0,
+                                ArrayDescriptor.dimlen[1]);
+                        return _theArray;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Integer")) {
+                        Integer I[] = ByteToInteger(_barray);
+                        System.arraycopy(I, 0, _theArray, 0,
+                                ArrayDescriptor.dimlen[1]);
+                        return _theArray;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Short")) {
+                        Short I[] = ByteToShort(_barray);
+                        System.arraycopy(I, 0, _theArray, 0,
+                                ArrayDescriptor.dimlen[1]);
+                        return _theArray;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Float")) {
+                        Float I[] = ByteToFloatObj(_barray);
+                        System.arraycopy(I, 0, _theArray, 0,
+                                ArrayDescriptor.dimlen[1]);
+                        return _theArray;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Double")) {
+                        Double I[] = ByteToDoubleObj(_barray);
+                        System.arraycopy(I, 0, _theArray, 0,
+                                ArrayDescriptor.dimlen[1]);
+                        return _theArray;
+                    }
+                    else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+                        Long I[] = ByteToLongObj(_barray);
+                        System.arraycopy(I, 0, _theArray, 0,
+                                ArrayDescriptor.dimlen[1]);
+                        return _theArray;
+                    }
+                    else {
+                        HDF5JavaException ex = new HDF5JavaException(
+                                "arrayify:  Object type not implemented yet...");
+                        throw (ex);
+                    }
+                }
+                else {
+                    HDF5JavaException ex = new HDF5JavaException(
+                            "arrayify:  unknown type not implemented yet...");
+                    throw (ex);
+                }
+            }
+            catch (OutOfMemoryError err) {
+                HDF5JavaException ex = new HDF5JavaException(
+                        "HDFArray: arrayify array too big?");
+                throw (ex);
+            }
+        }
+        /* Assert dims >= 2 */
+
+        Object oo = _theArray;
+        int n = 0; /* the current byte */
+        int index = 0;
+        int i;
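+        /*
+         * Walk the flat byte stream row by row: recompute the index along
+         * each dimension from the current byte offset, descending from the
+         * top-level array and reusing the cached sub-array in objs[] whenever
+         * the index for that dimension is unchanged.
+         */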
+        while (n < ArrayDescriptor.totalSize) {
+            oo = ArrayDescriptor.objs[0];
+            index = n / ArrayDescriptor.bytetoindex[0];
+            index %= ArrayDescriptor.dimlen[0];
+            for (i = 0; i < (ArrayDescriptor.dims); i++) {
+                index = n / ArrayDescriptor.bytetoindex[i];
+                index %= ArrayDescriptor.dimlen[i];
+
+                if (index == ArrayDescriptor.currentindex[i]) {
+                    /* then use cached copy */
+                    oo = ArrayDescriptor.objs[i];
+                }
+                else {
+                    /* check range of index */
+                    if (index > (ArrayDescriptor.dimlen[i] - 1)) {
+                        System.out.println("HDFArray::arrayify: index out of bounds?");
+                        return null;
+                    }
+                    oo = java.lang.reflect.Array.get(oo, index);
+                    ArrayDescriptor.currentindex[i] = index;
+                    ArrayDescriptor.objs[i] = oo;
+                }
+            }
+
+            /* array-ify */
+            try {
+                if (ArrayDescriptor.NT == 'J') {
+                    long[] arow = HDFNativeData.byteToLong(n,
+                            ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                            _barray);
+                    java.lang.reflect.Array
+                            .set(
+                                    ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                    (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                    arow);
+                    n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                    ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                }
+                else if (ArrayDescriptor.NT == 'I') {
+                    int[] arow = HDFNativeData.byteToInt(n,
+                            ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                            _barray);
+                    java.lang.reflect.Array
+                            .set(
+                                    ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                    (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                    arow);
+
+                    n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                    ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                }
+                else if (ArrayDescriptor.NT == 'S') {
+                    short[] arow = HDFNativeData.byteToShort(n,
+                            ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                            _barray);
+                    java.lang.reflect.Array
+                            .set(
+                                    ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                    (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                    arow);
+
+                    n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                    ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                }
+                else if (ArrayDescriptor.NT == 'B') {
+                    System.arraycopy(_barray, n,
+                            ArrayDescriptor.objs[ArrayDescriptor.dims - 1], 0,
+                            ArrayDescriptor.dimlen[ArrayDescriptor.dims]);
+                    n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                }
+                else if (ArrayDescriptor.NT == 'F') {
+                    float arow[] = HDFNativeData.byteToFloat(n,
+                            ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                            _barray);
+                    java.lang.reflect.Array
+                            .set(
+                                    ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                    (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                    arow);
+
+                    n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                    ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                }
+                else if (ArrayDescriptor.NT == 'D') {
+                    double[] arow = HDFNativeData.byteToDouble(n,
+                            ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                            _barray);
+                    java.lang.reflect.Array
+                            .set(
+                                    ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                    (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                    arow);
+
+                    n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                    ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                }
+                else if (ArrayDescriptor.NT == 'L') {
+                    if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+                        Byte I[] = ByteToByteObj(n,
+                                ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                _barray);
+                        java.lang.reflect.Array
+                                .set(
+                                        ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                        (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                        I);
+
+                        n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                        ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Integer")) {
+                        Integer I[] = ByteToInteger(n,
+                                ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                _barray);
+                        java.lang.reflect.Array
+                                .set(
+                                        ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                        (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                        I);
+
+                        n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                        ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Short")) {
+                        Short I[] = ByteToShort(n,
+                                ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                _barray);
+                        java.lang.reflect.Array
+                                .set(
+                                        ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                        (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                        I);
+
+                        n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                        ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Float")) {
+                        Float I[] = ByteToFloatObj(n,
+                                ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                _barray);
+                        java.lang.reflect.Array
+                                .set(
+                                        ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                        (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                        I);
+
+                        n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                        ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                    }
+                    else if (ArrayDescriptor.className
+                            .equals("java.lang.Double")) {
+                        Double I[] = ByteToDoubleObj(n,
+                                ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                _barray);
+                        java.lang.reflect.Array
+                                .set(
+                                        ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                        (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                        I);
+
+                        n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                        ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                    }
+                    else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+                        Long I[] = ByteToLongObj(n,
+                                ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+                                _barray);
+                        java.lang.reflect.Array
+                                .set(
+                                        ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+                                        (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+                                        I);
+
+                        n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+                        ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+                    }
+                    else {
+                        HDF5JavaException ex = new HDF5JavaException(
+                                "HDFArray: unsupported Object type: "
+                                        + ArrayDescriptor.NT);
+                        throw (ex);
+                    }
+                }
+                else {
+                    HDF5JavaException ex = new HDF5JavaException(
+                            "HDFArray: unknown or unsupported type: "
+                                    + ArrayDescriptor.NT);
+                    throw (ex);
+                }
+            }
+            catch (OutOfMemoryError err) {
+                HDF5JavaException ex = new HDF5JavaException(
+                        "HDFArray: arrayify array too big?");
+                throw (ex);
+            }
+
+        }
+
+        /* assert: the whole array is completed--currentindex should == len - 1 */
+
+        /* error checks */
+
+        if (n < ArrayDescriptor.totalSize) {
+            throw new java.lang.InternalError(
+                    "HDFArray::arrayify: Panic: did not consume all input data: n = "
+                            + n + " size = " + ArrayDescriptor.totalSize);
+        }
+        for (i = 0; i <= ArrayDescriptor.dims - 2; i++) {
+            if (ArrayDescriptor.currentindex[i] != ArrayDescriptor.dimlen[i] - 1) {
+                throw new java.lang.InternalError(
+                        "HDFArray::arrayify: Panic: did not complete all data: currentindex["
+                                + i + "] = " + ArrayDescriptor.currentindex[i]
+                                + " (should be "
+                                + (ArrayDescriptor.dimlen[i] - 1) + "?)");
+            }
+        }
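+        /*
+         * For the last dimension: 'B' rows are copied with arraycopy and do
+         * not advance currentindex, so its expected final value differs by
+         * one from the other element types.
+         */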
+        if (ArrayDescriptor.NT != 'B') {
+            if (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1] != ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1]) {
+                throw new java.lang.InternalError(
+                        "HDFArray::arrayify: Panic: did not complete all data: currentindex["
+                                + (ArrayDescriptor.dims - 1) + "] = "
+                                + ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]
+                                + " (should be "
+                                + ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1]
+                                + "?)");
+            }
+        }
+        else {
+            if (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1] != (ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1] - 1)) {
+                throw new java.lang.InternalError(
+                        "HDFArray::arrayify: Panic: did not complete all data: currentindex["
+                                + (ArrayDescriptor.dims - 1) + "] = "
+                                + ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]
+                                + " (should be "
+                                + (ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1] - 1)
+                                + "?)");
+            }
+        }
+
+        return _theArray;
+    }
+
+    private byte[] IntegerToByte(Integer in[]) {
+        int nelems = java.lang.reflect.Array.getLength(in);
+        int[] out = new int[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = in[i].intValue();
+        }
+        return HDFNativeData.intToByte(0, nelems, out);
+    }
+
+    private Integer[] ByteToInteger(byte[] bin) {
+        int in[] = HDFNativeData.byteToInt(bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Integer[] out = new Integer[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Integer(in[i]);
+        }
+        return out;
+    }
+
+    private Integer[] ByteToInteger(int start, int len, byte[] bin) {
+        int in[] = HDFNativeData.byteToInt(start, len, bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Integer[] out = new Integer[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Integer(in[i]);
+        }
+        return out;
+    }
+
+    private byte[] ShortToByte(Short in[]) {
+        int nelems = java.lang.reflect.Array.getLength(in);
+        short[] out = new short[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = in[i].shortValue();
+        }
+        return HDFNativeData.shortToByte(0, nelems, out);
+    }
+
+    private Short[] ByteToShort(byte[] bin) {
+        short in[] = HDFNativeData.byteToShort(bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Short[] out = new Short[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Short(in[i]);
+        }
+        return out;
+    }
+
+    private Short[] ByteToShort(int start, int len, byte[] bin) {
+        short in[] = HDFNativeData.byteToShort(start, len, bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Short[] out = new Short[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Short(in[i]);
+        }
+        return out;
+    }
+
+    private byte[] ByteObjToByte(Byte in[]) {
+        int nelems = java.lang.reflect.Array.getLength(in);
+        byte[] out = new byte[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = in[i].byteValue();
+        }
+        return out;
+    }
+
+    private Byte[] ByteToByteObj(byte[] bin) {
+        int nelems = java.lang.reflect.Array.getLength(bin);
+        Byte[] out = new Byte[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Byte(bin[i]);
+        }
+        return out;
+    }
+
+    private Byte[] ByteToByteObj(int start, int len, byte[] bin) {
+        Byte[] out = new Byte[len];
+
+        for (int i = 0; i < len; i++) {
+            out[i] = new Byte(bin[i]);
+        }
+        return out;
+    }
+
+    private byte[] FloatObjToByte(Float in[]) {
+        int nelems = java.lang.reflect.Array.getLength(in);
+        float[] out = new float[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = in[i].floatValue();
+        }
+        return HDFNativeData.floatToByte(0, nelems, out);
+    }
+
+    private Float[] ByteToFloatObj(byte[] bin) {
+        float in[] = HDFNativeData.byteToFloat(bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Float[] out = new Float[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Float(in[i]);
+        }
+        return out;
+    }
+
+    private Float[] ByteToFloatObj(int start, int len, byte[] bin) {
+        float in[] = HDFNativeData.byteToFloat(start, len, bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Float[] out = new Float[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Float(in[i]);
+        }
+        return out;
+    }
+
+    private byte[] DoubleObjToByte(Double in[]) {
+        int nelems = java.lang.reflect.Array.getLength(in);
+        double[] out = new double[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = in[i].doubleValue();
+        }
+        return HDFNativeData.doubleToByte(0, nelems, out);
+    }
+
+    private Double[] ByteToDoubleObj(byte[] bin) {
+        double in[] = HDFNativeData.byteToDouble(bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Double[] out = new Double[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Double(in[i]);
+        }
+        return out;
+    }
+
+    private Double[] ByteToDoubleObj(int start, int len, byte[] bin) {
+        double in[] = HDFNativeData.byteToDouble(start, len, bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Double[] out = new Double[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Double(in[i]);
+        }
+        return out;
+    }
+
+    private byte[] LongObjToByte(Long in[]) {
+        int nelems = java.lang.reflect.Array.getLength(in);
+        long[] out = new long[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = in[i].longValue();
+        }
+        return HDFNativeData.longToByte(0, nelems, out);
+    }
+
+    private Long[] ByteToLongObj(byte[] bin) {
+        long in[] = HDFNativeData.byteToLong(bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Long[] out = new Long[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Long(in[i]);
+        }
+        return out;
+    }
+
+    private Long[] ByteToLongObj(int start, int len, byte[] bin) {
+        long in[] = HDFNativeData.byteToLong(start, len, bin);
+        int nelems = java.lang.reflect.Array.getLength(in);
+        Long[] out = new Long[nelems];
+
+        for (int i = 0; i < nelems; i++) {
+            out[i] = new Long(in[i]);
+        }
+        return out;
+    }
+}
+
+/**
+ * This private class is used by HDFArray to discover the shape and type of an
+ * arbitrary array.
+ * <p>
+ * We use java.lang.reflect here.
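+ * <p>
+ * For example, for an array declared <code>int[10][5]</code>,
+ * <code>getClass().toString()</code> yields <code>"class [[I"</code>; the two
+ * '[' characters give dims = 2 and the final 'I' gives NT = 'I'.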
+ */
+class ArrayDescriptor {
+
+    static String theType = "";
+    static Class<? extends Object> theClass = null;
+    static int[] dimlen = null;
+    static int[] dimstart = null;
+    static int[] currentindex = null;
+    static int[] bytetoindex = null;
+    static int totalSize = 0;
+    static Object[] objs = null;
+    static char NT = ' '; /* must be B,S,I,J,F,D, or L, else error */
+    static int NTsize = 0;
+    static int dims = 0;
+    static String className;
+
+    public ArrayDescriptor(Object anArray) throws HDF5Exception {
+
+        Class<? extends Object> tc = anArray.getClass();
+        if (tc.isArray() == false) {
+            /* exception: not an array */
+            HDF5Exception ex = new HDF5JavaException(
+                    "ArrayDescriptor: not an array?: ");
+            throw (ex);
+        }
+
+        theClass = tc;
+
+        /*
+         * parse the type descriptor to discover the shape of the array
+         */
+        String ss = tc.toString();
+        theType = ss;
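+        /* n = 6 skips the "class " prefix produced by Class.toString() */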
+        int n = 6;
+        dims = 0;
+        char c = ' ';
+        while (n < ss.length()) {
+            c = ss.charAt(n);
+            n++;
+            if (c == '[') {
+                dims++;
+            }
+        }
+
+        String css = ss.substring(ss.lastIndexOf('[') + 1);
+        NT = c; /* B,S,I,J,F,D for primitive arrays; object arrays are handled below */
+        if (NT == 'B') {
+            NTsize = 1;
+        }
+        else if (NT == 'S') {
+            NTsize = 2;
+        }
+        else if ((NT == 'I') || (NT == 'F')) {
+            NTsize = 4;
+        }
+        else if ((NT == 'J') || (NT == 'D')) {
+            NTsize = 8;
+        }
+        else if (css.startsWith("Ljava.lang.Byte")) {
+            NT = 'L';
+            className = "java.lang.Byte";
+            NTsize = 1;
+        }
+        else if (css.startsWith("Ljava.lang.Short")) {
+            NT = 'L';
+            className = "java.lang.Short";
+            NTsize = 2;
+        }
+        else if (css.startsWith("Ljava.lang.Integer")) {
+            NT = 'L';
+            className = "java.lang.Integer";
+            NTsize = 4;
+        }
+        else if (css.startsWith("Ljava.lang.Float")) {
+            NT = 'L';
+            className = "java.lang.Float";
+            NTsize = 4;
+        }
+        else if (css.startsWith("Ljava.lang.Double")) {
+            NT = 'L';
+            className = "java.lang.Double";
+            NTsize = 8;
+        }
+        else if (css.startsWith("Ljava.lang.Long")) {
+            NT = 'L';
+            className = "java.lang.Long";
+            NTsize = 8;
+        }
+        else if (css.startsWith("Ljava.lang.String")) {
+            throw new HDF5JavaException(new String(
+                    "ArrayDesciptor: Error:  String array not supported yet"));
+        }
+        else {
+            /*
+             * exception: not a numeric type
+             */
+            throw new HDF5JavaException(new String(
+                    "ArrayDesciptor: Error:  array is not numeric (type is "
+                            + css + ") ?"));
+        }
+
+        /* fill in the table */
+        dimlen = new int[dims + 1];
+        dimstart = new int[dims + 1];
+        currentindex = new int[dims + 1];
+        bytetoindex = new int[dims + 1];
+        objs = new Object[dims + 1];
+
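+        /*
+         * Slot 0 describes the whole array object (dimlen[0] = 1); slots
+         * 1..dims describe the successive dimensions, probed by reflection on
+         * the first element at each level.
+         */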
+        Object o = anArray;
+        objs[0] = o;
+        dimlen[0] = 1;
+        dimstart[0] = 0;
+        currentindex[0] = 0;
+        int i;
+        for (i = 1; i <= dims; i++) {
+            dimlen[i] = java.lang.reflect.Array.getLength(o);
+            o = java.lang.reflect.Array.get(o, 0);
+            objs[i] = o;
+            dimstart[i] = 0;
+            currentindex[i] = 0;
+        }
+
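+        /*
+         * bytetoindex[i] = bytes spanned by one step of index i, i.e.
+         * NTsize * dimlen[i + 1] * ... * dimlen[dims]; bytetoindex[0] is thus
+         * the total byte size of the whole array.
+         */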
+        int j;
+        int dd;
+        bytetoindex[dims] = NTsize;
+        for (i = dims; i >= 0; i--) {
+            dd = NTsize;
+            for (j = i; j < dims; j++) {
+                dd *= dimlen[j + 1];
+            }
+            bytetoindex[i] = dd;
+        }
+
+        totalSize = bytetoindex[0];
+    }
+
+    /**
+     * Debug dump
+     */
+    public void dumpInfo() {
+        System.out.println("Type: " + theType);
+        System.out.println("Class: " + theClass);
+        System.out.println("NT: " + NT + " NTsize: " + NTsize);
+        System.out.println("Array has " + dims + " dimensions (" + totalSize
+                + " bytes)");
+        int i;
+        for (i = 0; i <= dims; i++) {
+            Class<? extends Object> tc = objs[i].getClass();
+            String ss = tc.toString();
+            System.out.println(i + ":  start " + dimstart[i] + ": len "
+                    + dimlen[i] + " current " + currentindex[i]
+                    + " bytetoindex " + bytetoindex[i] + " object " + objs[i]
+                    + " otype " + ss);
+        }
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/HDFNativeData.java b/source/java/ncsa/hdf/hdf5lib/HDFNativeData.java
new file mode 100644
index 0000000..7709453
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/HDFNativeData.java
@@ -0,0 +1,476 @@
+/****************************************************************************
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * This class encapsulates native methods to deal with arrays of numbers,
+ * converting from numbers to bytes and bytes to numbers.
+ * <p>
+ * These routines are used by class <b>HDFArray</b> to pass data to and from the
+ * HDF-5 library.
+ * <p>
+ * Methods xxxToByte() convert a Java array of primitive numbers (int, short,
+ * ...) to a Java array of bytes. Methods byteToXxx() convert from a Java array
+ * of bytes into a Java array of primitive numbers (int, short, ...)
+ * <p>
+ * Variant interfaces convert a section of an array, and also can convert to
+ * sub-classes of Java <b>Number</b>.
+ * <P>
+ * <b>See also:</b> ncsa.hdf.hdf5lib.HDFArray.
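+ * <p>
+ * Typical round trip (illustrative sketch):
+ * 
+ * <pre>
+ * int[] values = { 1, 2, 3 };
+ * byte[] raw = HDFNativeData.intToByte(0, values.length, values);
+ * int[] back = HDFNativeData.byteToInt(raw); // back[i] == values[i]
+ * </pre>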
+ */
+
+public class HDFNativeData {
+
+    /**
+     * Convert an array of bytes into an array of ints
+     * 
+     * @param data
+     *            The input array of bytes
+     * @return an array of int
+     */
+    public synchronized static native int[] byteToInt(byte[] data);
+
+    /**
+     * Convert an array of bytes into an array of floats
+     * 
+     * @param data
+     *            The input array of bytes
+     * @return an array of float
+     */
+    public synchronized static native float[] byteToFloat(byte[] data);
+
+    /**
+     * Convert an array of bytes into an array of shorts
+     * 
+     * @param data
+     *            The input array of bytes
+     * @return an array of short
+     */
+    public synchronized static native short[] byteToShort(byte[] data);
+
+    /**
+     * Convert an array of bytes into an array of long
+     * 
+     * @param data
+     *            The input array of bytes
+     * @return an array of long
+     */
+    /*
+     * Note: C 'long' is platform-dependent (often 32 bits), whereas Java
+     * 'long' is always 64 bits; the native implementation must widen to a
+     * 64-bit type (jlong) for this to be correct.
+     */
+    public synchronized static native long[] byteToLong(byte[] data);
+
+    /**
+     * Convert an array of bytes into an array of double
+     * 
+     * @param data
+     *            The input array of bytes
+     * @return an array of double
+     */
+    public synchronized static native double[] byteToDouble(byte[] data);
+
+    /**
+     * Convert a range from an array of bytes into an array of int
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param len
+     *            The number of 'int' to convert
+     * @param data
+     *            The input array of bytes
+     * @return an array of 'len' int
+     */
+    public synchronized static native int[] byteToInt(int start, int len,
+            byte[] data);
+
+    /**
+     * Convert 4 bytes from an array of bytes into a single int
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param data
+     *            The input array of bytes
+     * @return The integer value of the bytes.
+     */
+    public synchronized static int byteToInt(byte[] data, int start) {
+        int[] ival = byteToInt(start, 1, data);
+        return ival[0];
+    }
+
+    /**
+     * Convert a range from an array of bytes into an array of short
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param len
+     *            The number of 'short' to convert
+     * @param data
+     *            The input array of bytes
+     * @return an array of 'len' short
+     */
+    public synchronized static native short[] byteToShort(int start, int len,
+            byte[] data);
+
+    /**
+     * Convert 2 bytes from an array of bytes into a single short
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param data
+     *            The input array of bytes
+     * @return The short value of the bytes.
+     */
+    public synchronized static short byteToShort(byte[] data, int start) {
+        short[] sval = byteToShort(start, 1, data);
+        return sval[0];
+    }
+
+    /**
+     * Convert a range from an array of bytes into an array of float
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param len
+     *            The number of 'float' to convert
+     * @param data
+     *            The input array of bytes
+     * @return an array of 'len' float
+     */
+    public synchronized static native float[] byteToFloat(int start, int len,
+            byte[] data);
+
+    /**
+     * Convert 4 bytes from an array of bytes into a single float
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param data
+     *            The input array of bytes
+     * @return The float value of the bytes.
+     */
+    public synchronized static float byteToFloat(byte[] data, int start) {
+        float[] fval = byteToFloat(start, 1, data);
+        return fval[0];
+    }
+
+    /**
+     * Convert a range from an array of bytes into an array of long
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param len
+     *            The number of 'long' to convert
+     * @param data
+     *            The input array of bytes
+     * @return an array of 'len' long
+     */
+    public synchronized static native long[] byteToLong(int start, int len,
+            byte[] data);
+
+    /**
+     * Convert 8 bytes from an array of bytes into a single long
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param data
+     *            The input array of bytes
+     * @return The long value of the bytes.
+     */
+    public synchronized static long byteToLong(byte[] data, int start) {
+        long[] lval = byteToLong(start, 1, data);
+        return lval[0];
+    }
+
+    /**
+     * Convert a range from an array of bytes into an array of double
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param len
+     *            The number of 'double' to convert
+     * @param data
+     *            The input array of bytes
+     * @return an array of 'len' double
+     */
+    public synchronized static native double[] byteToDouble(int start, int len,
+            byte[] data);
+
+    /**
+     * Convert 8 bytes from an array of bytes into a single double
+     * 
+     * @param start
+     *            The position in the input array of bytes to start
+     * @param data
+     *            The input array of bytes
+     * @return The double value of the bytes.
+     */
+    public synchronized static double byteToDouble(byte[] data, int start) {
+        double[] dval = byteToDouble(start, 1, data);
+        return dval[0];
+    }
+
+    /**
+     * Convert a range from an array of int into an array of bytes.
+     * 
+     * @param start
+     *            The position in the input array of int to start
+     * @param len
+     *            The number of 'int' to convert
+     * @param data
+     *            The input array of int
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] intToByte(int start, int len,
+            int[] data);
+
+    /**
+     * Convert a range from an array of short into an array of bytes.
+     * 
+     * @param start
+     *            The position in the input array of short to start
+     * @param len
+     *            The number of 'short' to convert
+     * @param data
+     *            The input array of short
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] shortToByte(int start, int len,
+            short[] data);
+
+    /**
+     * Convert a range from an array of float into an array of bytes.
+     * 
+     * @param start
+     *            The position in the input array of float to start
+     * @param len
+     *            The number of 'float' to convert
+     * @param data
+     *            The input array of float
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] floatToByte(int start, int len,
+            float[] data);
+
+    /**
+     * Convert a range from an array of long into an array of bytes.
+     * 
+     * @param start
+     *            The position in the input array of long to start
+     * @param len
+     *            The number of 'long' to convert
+     * @param data
+     *            The input array of long
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] longToByte(int start, int len,
+            long[] data);
+
+    /**
+     * Convert a range from an array of double into an array of bytes.
+     * 
+     * @param start
+     *            The position in the input array of double to start
+     * @param len
+     *            The number of 'double' to convert
+     * @param data
+     *            The input array of double
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] doubleToByte(int start, int len,
+            double[] data);
+
+    /**
+     * Convert a single byte into an array of one byte.
+     * <p>
+     * (This is a trivial method.)
+     * 
+     * @param data
+     *            The input byte
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] byteToByte(byte data);
+
+    /**
+     * Convert a single Byte object into an array of one byte.
+     * <p>
+     * (This is an almost trivial method.)
+     * 
+     * @param data
+     *            The input Byte
+     * @return an array of bytes
+     */
+    public synchronized static byte[] byteToByte(Byte data) {
+        return byteToByte(data.byteValue());
+    }
+
+    /**
+     * Convert a single int into an array of 4 bytes.
+     * 
+     * @param data
+     *            The input int
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] intToByte(int data);
+
+    /**
+     * Convert a single Integer object into an array of 4 bytes.
+     * 
+     * @param data
+     *            The input Integer
+     * @return an array of bytes
+     */
+    public synchronized static byte[] intToByte(Integer data) {
+        return intToByte(data.intValue());
+    }
+
+    /**
+     * Convert a single short into an array of 2 bytes.
+     * 
+     * @param data
+     *            The input short
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] shortToByte(short data);
+
+    /**
+     * Convert a single Short object into an array of 2 bytes.
+     * 
+     * @param data
+     *            The input Short
+     * @return an array of bytes
+     */
+    public synchronized static byte[] shortToByte(Short data) {
+        return shortToByte(data.shortValue());
+    }
+
+    /**
+     * Convert a single float into an array of 4 bytes.
+     * 
+     * @param data
+     *            The input float
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] floatToByte(float data);
+
+    /**
+     * Convert a single Float object into an array of 4 bytes.
+     * 
+     * @param data
+     *            The input Float
+     * @return an array of bytes
+     */
+    public synchronized static byte[] floatToByte(Float data) {
+        return floatToByte(data.floatValue());
+    }
+
+    /**
+     * Convert a single long into an array of 8 bytes.
+     * 
+     * @param data
+     *            The input long
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] longToByte(long data);
+
+    /**
+     * Convert a single Long object into an array of 8 bytes.
+     * 
+     * @param data
+     *            The input Long
+     * @return an array of bytes
+     */
+    public synchronized static byte[] longToByte(Long data) {
+        return longToByte(data.longValue());
+    }
+
+    /**
+     * Convert a single double into an array of 8 bytes.
+     * 
+     * @param data
+     *            The input double
+     * @return an array of bytes
+     */
+    public synchronized static native byte[] doubleToByte(double data);
+
+    /**
+     * Convert a single Double object into an array of 8 bytes.
+     * 
+     * @param data
+     *            The input Double
+     * @return an array of bytes
+     */
+    public synchronized static byte[] doubleToByte(Double data) {
+        return doubleToByte(data.doubleValue());
+    }
+
+    /**
+     * Create a Number object from an array of bytes.
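+     * <p>
+     * For example (illustrative sketch):
+     * 
+     * <pre>
+     * byte[] raw = HDFNativeData.intToByte(42);
+     * Integer n = (Integer) HDFNativeData.byteToNumber(raw, new Integer(0)); // n == 42
+     * </pre>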
+     * 
+     * @param barray
+     *            The bytes to be converted
+     * @param obj
+     *            Input object of the desired output class. Must be a sub-class
+     *            of Number.
+     * @return An Object of the same type as obj.
+     */
+    public synchronized static Object byteToNumber(byte[] barray, Object obj)
+            throws HDF5Exception {
+        Class<?> theClass = obj.getClass();
+        String type = theClass.getName();
+        Object retobj = null;
+
+        if (type.equals("java.lang.Integer")) {
+            int[] i = ncsa.hdf.hdf5lib.HDFNativeData.byteToInt(0, 1, barray);
+            retobj = new Integer(i[0]);
+        }
+        else if (type.equals("java.lang.Byte")) {
+            retobj = new Byte(barray[0]);
+        }
+        else if (type.equals("java.lang.Short")) {
+            short[] f = ncsa.hdf.hdf5lib.HDFNativeData
+                    .byteToShort(0, 1, barray);
+            retobj = new Short(f[0]);
+        }
+        else if (type.equals("java.lang.Float")) {
+            float[] f = ncsa.hdf.hdf5lib.HDFNativeData
+                    .byteToFloat(0, 1, barray);
+            retobj = new Float(f[0]);
+        }
+        else if (type.equals("java.lang.Long")) {
+            long[] f = ncsa.hdf.hdf5lib.HDFNativeData.byteToLong(0, 1, barray);
+            retobj = new Long(f[0]);
+        }
+        else if (type.equals("java.lang.Double")) {
+            double[] f = ncsa.hdf.hdf5lib.HDFNativeData.byteToDouble(0, 1,
+                    barray);
+            retobj = new Double(f[0]);
+        }
+        else {
+            /* exception: unsupported type */
+            HDF5Exception ex = new HDF5JavaException(
+                    "byteToNumber: setfield bad type: " + obj + " " + type);
+            throw (ex);
+        }
+        return (retobj);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/Callbacks.java b/source/java/ncsa/hdf/hdf5lib/callbacks/Callbacks.java
new file mode 100644
index 0000000..357ae10
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/Callbacks.java
@@ -0,0 +1,18 @@
+package ncsa.hdf.hdf5lib.callbacks;
+
+/** All callback definitions must derive from this interface.  Any 
+ * derived interfaces must define a single public method  named "callback".
+ * You are responsible for deregistering your callback (if necessary)
+ * in its {@link Object#finalize} method.  If native code attempts to call
+ * a callback which has been GC'd, you will likely crash the VM.  If 
+ * there is no method to deregister the callback (e.g. <code>atexit</code>
+ * in the C library), you must ensure that you always keep a live reference
+ * to the callback object.<p>
+ * A callback should generally never throw an exception, since it doesn't
+ * necessarily have an encompassing Java environment to catch it.  Any
+ * exceptions thrown will be passed to the default callback exception
+ * handler. 
+ */
+public interface Callbacks {
+
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/H5D_iterate_cb.java b/source/java/ncsa/hdf/hdf5lib/callbacks/H5D_iterate_cb.java
new file mode 100644
index 0000000..95bab9f
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/H5D_iterate_cb.java
@@ -0,0 +1,19 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.callbacks;
+
+// Information class for dataset element callback (for H5Diterate)
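+// Example (illustrative sketch): a no-op visitor; by HDF5 convention
+// returning 0 continues the iteration and a nonzero value stops it:
+//     H5D_iterate_cb cb = new H5D_iterate_cb() {
+//         public int callback(byte[] elem, int elem_type, int ndim,
+//                 long[] point, H5D_iterate_t op_data) {
+//             return 0;
+//         }
+//     };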
+public interface H5D_iterate_cb extends Callbacks {
+    int callback(byte[] elem, int elem_type, int ndim, long[] point, H5D_iterate_t op_data);
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/H5D_iterate_t.java b/source/java/ncsa/hdf/hdf5lib/callbacks/H5D_iterate_t.java
new file mode 100644
index 0000000..90a82d5
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/H5D_iterate_t.java
@@ -0,0 +1,7 @@
+package ncsa.hdf.hdf5lib.callbacks;
+
+public interface H5D_iterate_t {
+    /**
+     * Any derived interface must define a single public variable, e.g.
+     * <code>public ArrayList iterdata = new ArrayList();</code>.
+     */
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/H5L_iterate_cb.java b/source/java/ncsa/hdf/hdf5lib/callbacks/H5L_iterate_cb.java
new file mode 100644
index 0000000..d5f9414
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/H5L_iterate_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.callbacks;
+
+import ncsa.hdf.hdf5lib.structs.H5L_info_t;
+
+// Information class for link callback (for H5Lvisit/H5Lvisit_by_name)
+public interface H5L_iterate_cb extends Callbacks {
+    int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data);
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/H5L_iterate_t.java b/source/java/ncsa/hdf/hdf5lib/callbacks/H5L_iterate_t.java
new file mode 100644
index 0000000..444feb3
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/H5L_iterate_t.java
@@ -0,0 +1,7 @@
+package ncsa.hdf.hdf5lib.callbacks;
+
+public interface H5L_iterate_t {
+    /**
+     * Any derived interface must define a single public variable, e.g.
+     * <code>public ArrayList iterdata = new ArrayList();</code>.
+     */
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/H5O_iterate_cb.java b/source/java/ncsa/hdf/hdf5lib/callbacks/H5O_iterate_cb.java
new file mode 100644
index 0000000..d6e5b12
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/H5O_iterate_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.callbacks;
+
+import ncsa.hdf.hdf5lib.structs.H5O_info_t;
+
+// Information class for object callback (for H5Ovisit/H5Ovisit_by_name)
+public interface H5O_iterate_cb extends Callbacks {
+    int callback(int group, String name, H5O_info_t info, H5O_iterate_t op_data);
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/callbacks/H5O_iterate_t.java b/source/java/ncsa/hdf/hdf5lib/callbacks/H5O_iterate_t.java
new file mode 100644
index 0000000..ee79aea
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/callbacks/H5O_iterate_t.java
@@ -0,0 +1,7 @@
+package ncsa.hdf.hdf5lib.callbacks;
+
+public interface H5O_iterate_t {
+    /**
+     * Any derived interface must define a single public variable, e.g.
+     * <code>public ArrayList iterdata = new ArrayList();</code>.
+     */
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5AtomException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5AtomException.java
new file mode 100644
index 0000000..ddb51fa
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5AtomException.java
@@ -0,0 +1,38 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException represents errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_ATOM</b>
+ */
+
+public class HDF5AtomException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5AtomException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5AtomException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5AttributeException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5AttributeException.java
new file mode 100644
index 0000000..ca0fde5
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5AttributeException.java
@@ -0,0 +1,36 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_ATTR</b>
+ */
+public class HDF5AttributeException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5AttributeException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5AttributeException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5BtreeException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5BtreeException.java
new file mode 100644
index 0000000..ffa7a66
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5BtreeException.java
@@ -0,0 +1,36 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_BTREE</b>
+ */
+public class HDF5BtreeException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5BtreeException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5BtreeException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java
new file mode 100644
index 0000000..9c50741
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java
@@ -0,0 +1,36 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_PLINE</b>
+ */
+public class HDF5DataFiltersException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5DataFiltersException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5DataFiltersException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataStorageException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataStorageException.java
new file mode 100644
index 0000000..6772668
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataStorageException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_STORAGE</b>
+ */
+
+public class HDF5DataStorageException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5DataStorageException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5DataStorageException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java
new file mode 100644
index 0000000..7dbcfb5
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java
@@ -0,0 +1,36 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_DATASET</b>
+ */
+public class HDF5DatasetInterfaceException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5DatasetInterfaceException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5DatasetInterfaceException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java
new file mode 100644
index 0000000..cd6e271
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_DATASPACE</b>
+ */
+
+public class HDF5DataspaceInterfaceException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5DataspaceInterfaceException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5DataspaceInterfaceException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java
new file mode 100644
index 0000000..b677033
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_DATATYPE</b>
+ */
+
+public class HDF5DatatypeInterfaceException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5DatatypeInterfaceException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5DatatypeInterfaceException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5Exception.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5Exception.java
new file mode 100644
index 0000000..4ef70e9
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5Exception.java
@@ -0,0 +1,63 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * <p>
+ * The class HDF5Exception returns errors from the Java HDF5 Interface.
+ * <p>
+ * Two sub-classes of HDF5Exception are defined:
+ * <p>
+ * <ol>
+ * <li> HDF5LibraryException -- errors raised by the HDF5 library code
+ * <li> HDF5JavaException -- errors raised by the HDF5 Java wrapper code
+ * </ol>
+ * <p>
+ * These exceptions are sub-classed to represent specific error conditions, as needed. In particular,
+ * HDF5LibraryException has a sub-class for each major error code returned by the HDF5 library.
+ */
+public class HDF5Exception extends RuntimeException
+{
+    private static final long serialVersionUID = 1L;
+
+    protected String detailMessage;
+
+    /**
+     * Constructs an <code>HDF5Exception</code> with no specified detail message.
+     */
+    public HDF5Exception()
+    {
+        super();
+    }
+
+    /**
+     * Constructs an <code>HDF5Exception</code> with the specified detail message.
+     * 
+     * @param message the detail message.
+     */
+    public HDF5Exception(final String message)
+    {
+        super();
+        detailMessage = message;
+    }
+
+    /**
+     * Returns the detail message of this exception.
+     * 
+     * @return the detail message or <code>null</code> if this object does not have a detail message.
+     */
+    @Override
+    public String getMessage()
+    {
+        return detailMessage;
+    }
+}
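
Because HDF5Exception extends RuntimeException, all of these exceptions are unchecked. Code that wants to react differently to the two branches of the hierarchy can order its catch clauses from most specific to least specific, as in this minimal, hypothetical sketch (the Hdf5ErrorHandling class and its run() helper are illustrative only):

    import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
    import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
    import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;

    class Hdf5ErrorHandling
    {
        static void run(Runnable hdf5Call)
        {
            try
            {
                hdf5Call.run();
            } catch (HDF5LibraryException e)
            {
                // Raised inside the native HDF5 library; carries the
                // major/minor error codes.
                System.err.println("library error " + e.getMajorErrorNumber()
                        + "/" + e.getMinorErrorNumber() + ": " + e.getMessage());
            } catch (HDF5JavaException e)
            {
                // Raised by the Java wrapper code itself.
                System.err.println("wrapper error: " + e.getMessage());
            } catch (HDF5Exception e)
            {
                // Any other error from the Java HDF5 interface.
                System.err.println("HDF5 error: " + e.getMessage());
            }
        }
    }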
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java
new file mode 100644
index 0000000..ff0c3d6
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_EFL</b>
+ */
+
+public class HDF5ExternalFileListException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5ExternalFileListException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5ExternalFileListException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java
new file mode 100644
index 0000000..881d39e
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_FILE</b>
+ */
+
+public class HDF5FileInterfaceException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5FileInterfaceException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5FileInterfaceException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FileNotFoundException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FileNotFoundException.java
new file mode 100644
index 0000000..f5d808e
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FileNotFoundException.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2011 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+import java.io.File;
+
+/**
+ * <p>
+ * The class HDF5JavaException returns errors from the Java wrapper of the HDF5 library.
+ * <p>
+ * This exception communicates that a file is not found or cannot be opened.
+ *
+ * @author Bernd Rinn
+ */
+public class HDF5FileNotFoundException extends HDF5JavaException
+{
+    private static final long serialVersionUID = 1L;
+
+    public HDF5FileNotFoundException(File file, String msg)
+    {
+        super(msg + " (" + file.getAbsolutePath() + ")");
+    }
+
+}
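
The constructor appends the file's absolute path in parentheses to the supplied message. A wrapper that validates a path before handing it to the native layer might use it as in this hypothetical sketch (the OpenPreconditions class is illustrative only):

    import java.io.File;

    import ncsa.hdf.hdf5lib.exceptions.HDF5FileNotFoundException;

    class OpenPreconditions
    {
        // Throws with a message of the form "msg (/absolute/path)".
        static void requireHdf5File(File f)
        {
            if (!f.exists())
            {
                throw new HDF5FileNotFoundException(f, "Path does not exist");
            }
            if (!f.isFile())
            {
                throw new HDF5FileNotFoundException(f, "Path is not a file");
            }
        }
    }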
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java
new file mode 100644
index 0000000..a661ab9
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_ARGS</b>
+ */
+
+public class HDF5FunctionArgumentException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5FunctionArgumentException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5FunctionArgumentException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java
new file mode 100644
index 0000000..101b533
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_FUNC</b>
+ */
+
+public class HDF5FunctionEntryExitException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5FunctionEntryExitException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5FunctionEntryExitException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5HeapException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5HeapException.java
new file mode 100644
index 0000000..600bdc4
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5HeapException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_HEAP</b>
+ */
+
+public class HDF5HeapException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5HeapException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5HeapException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java
new file mode 100644
index 0000000..2ee68a3
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java
@@ -0,0 +1,38 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ *                                                                          *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_INTERNAL</b>
+ */
+
+public class HDF5InternalErrorException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5InternalErrorException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5InternalErrorException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5JavaException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5JavaException.java
new file mode 100644
index 0000000..9d998d6
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5JavaException.java
@@ -0,0 +1,41 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * <p>
+ * The class HDF5JavaException returns errors from the Java wrapper of the HDF5 library.
+ * <p>
+ * These errors include Java configuration errors, security violations, and resource exhaustion.
+ */
+public class HDF5JavaException extends HDF5Exception
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5JavaException</code> with no specified detail message.
+     */
+    public HDF5JavaException()
+    {
+        super();
+    }
+
+    /**
+     * Constructs an <code>HDF5JavaException</code> with the specified detail message.
+     * 
+     * @param s the detail message.
+     */
+    public HDF5JavaException(final String s)
+    {
+        super(s);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5LibraryException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5LibraryException.java
new file mode 100644
index 0000000..fee5f6c
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5LibraryException.java
@@ -0,0 +1,256 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5General.H5open;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+
+
+/**
+ * <p>
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * Each major error code from the HDF-5 Library is represented by a sub-class of this class, and by
+ * default the 'detailMessage' is set according to the minor error code from the HDF-5 Library.
+ * <p>
+ * For major and minor error codes, see <b>H5Epublic.h</b> in the HDF-5 library.
+ * <p>
+ */
+
+public class HDF5LibraryException extends HDF5Exception
+{
+    private static final int UNKNOWN = -1;
+
+    private static final long serialVersionUID = 1L;
+
+    private final int majorErrorNumber;
+
+    private final int minorErrorNumber;
+
+    private final String majorErrorMessage;
+
+    private final String minorErrorMessage;
+
+    private final String hdf5ErrorStackString;
+
+    private final String hdf5ErrorStackLastElementString;
+
+    /**
+     * Constructs an <code>HDF5LibraryException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5LibraryException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorMessage + ":" + minorErrorMessage);
+        // this code forces the loading of the HDF-5 library
+        // to assure that the native methods are available
+        try
+        {
+            H5open();
+        } catch (final Exception e)
+        {
+        }
+        this.majorErrorNumber = majorErrorNumber;
+        this.majorErrorMessage = majorErrorMessage;
+        this.minorErrorNumber = minorErrorNumber;
+        this.minorErrorMessage = minorErrorMessage;
+        this.hdf5ErrorStackString = retrieveHDF5ErrorStackAsString();
+        this.hdf5ErrorStackLastElementString = getLastErrorStackElement(hdf5ErrorStackString);
+    }
+
+    private static String getLastErrorStackElement(String hdf5ErrorStackString)
+    {
+        int idx = hdf5ErrorStackString.length() - 3;
+        int lastLineBreakIdx = hdf5ErrorStackString.length();
+        while (--idx > 0)
+        {
+            if (hdf5ErrorStackString.charAt(idx) == '\n')
+            {
+                lastLineBreakIdx = idx;
+            }
+            if (hdf5ErrorStackString.substring(idx - 1, idx + 3).equals("\n  #"))
+            {
+                idx += 3;
+                while (idx < hdf5ErrorStackString.length()
+                        && hdf5ErrorStackString.charAt(idx) != ' ')
+                {
+                    ++idx;
+                }
+                return hdf5ErrorStackString.substring(idx + 1, lastLineBreakIdx);
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Constructs an <code>HDF5LibraryException</code> with the specified detail message.
+     * 
+     * @param errorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5LibraryException(final String errorMessage)
+    {
+        super(errorMessage);
+        // this code forces the loading of the HDF-5 library
+        // to assure that the native methods are available
+        try
+        {
+            H5open();
+        } catch (final Exception e)
+        {
+        }
+        this.majorErrorNumber = UNKNOWN;
+        this.majorErrorMessage = errorMessage;
+        this.minorErrorNumber = UNKNOWN;
+        this.minorErrorMessage = "";
+        this.hdf5ErrorStackString = "No error stack";
+        this.hdf5ErrorStackLastElementString = null;
+    }
+
+    @Override
+    public String getMessage()
+    {
+        if (hdf5ErrorStackLastElementString != null)
+        {
+            return super.getMessage() + " [\"" + hdf5ErrorStackLastElementString + "\"]";
+        } else
+        {
+            return super.getMessage();
+        }
+    }
+
+    /**
+     * Get the major error number of the first error on the HDF5 library error stack.
+     * 
+     * @return the major error number
+     */
+    public int getMajorErrorNumber()
+    {
+        return majorErrorNumber;
+    }
+
+    /**
+     * Return an error message for the major error number of this exception.
+     * <p>
+     * These messages come from <b>H5Epublic.h</b>.
+     * 
+     * @return the string of the major error
+     */
+    public String getMajorError()
+    {
+        return majorErrorMessage;
+    }
+
+    /**
+     * Get the minor error number of the first error on the HDF5 library error stack.
+     * 
+     * @return the minor error number
+     */
+    public int getMinorErrorNumber()
+    {
+        return minorErrorNumber;
+    }
+
+    /**
+     * Return an error message for the minor error number of this exception.
+     * <p>
+     * These messages come from <b>H5Epublic.h</b>.
+     * 
+     * @return the string of the minor error
+     */
+    public String getMinorError()
+    {
+        return minorErrorMessage;
+    }
+
+    /**
+     * Returns the error stack as retrieved from the HDF5 library as a string.
+     */
+    private String retrieveHDF5ErrorStackAsString()
+    {
+        try
+        {
+            final File tempFile = File.createTempFile("HDF5_error_stack", ".txt");
+            try
+            {
+                printStackTrace0(tempFile.getPath());
+                return FileUtils.readFileToString(tempFile).trim();
+            } finally
+            {
+                tempFile.delete();
+            }
+        } catch (IOException ex)
+        {
+            System.err.println("Cannot create error stack file.");
+            ex.printStackTrace();
+            return null;
+        }
+    }
+
+    /**
+     * Returns the error stack from the HDF5 library as a string.
+     */
+    public String getHDF5ErrorStackAsString()
+    {
+        return hdf5ErrorStackString;
+    }
+
+    /**
+     * Prints this <code>HDF5LibraryException</code>, the HDF-5 Library error stack, and the
+     * Java stack trace to the standard error stream.
+     */
+    @Override
+    public void printStackTrace()
+    {
+        System.err.println(getHDF5ErrorStackAsString()); // the HDF-5 Library error stack
+        super.printStackTrace(); // the Java stack trace
+    }
+
+    /**
+     * Prints this <code>HDF5LibraryException</code>, the HDF-5 Library error stack, and the Java
+     * stack trace to the specified file.
+     */
+    public void printStackTrace(final java.io.File f)
+    {
+        if ((f == null) || !f.exists() || f.isDirectory() || !f.canWrite())
+        {
+            printStackTrace();
+        } else
+        {
+            try
+            {
+                final java.io.FileOutputStream o = new java.io.FileOutputStream(f);
+                final java.io.PrintWriter p = new java.io.PrintWriter(o);
+                p.println(getHDF5ErrorStackAsString()); // the HDF-5 Library error stack
+                super.printStackTrace(p); // the Java stack trace
+                p.close();
+            } catch (final Exception ex)
+            {
+                System.err.println(this);
+            }
+        }
+    }
+
+    /*
+     * This private method calls the HDF-5 library to extract the error codes and error stack.
+     */
+    private native void printStackTrace0(String s);
+
+}
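
getMessage() appends, in quotes, the description that getLastErrorStackElement() extracts from the last "  #NNN:" frame line of the native error stack dump. The same parsing can be written as a forward scan; the following self-contained sketch (with a made-up, abbreviated stack string) shows what that extraction yields:

    class LastFrameDemo
    {
        // Returns the text after the "#NNN:" token on the last frame line,
        // i.e. the innermost error description of the dump.
        static String lastFrame(String stack)
        {
            int marker = stack.lastIndexOf("\n  #");
            if (marker < 0)
            {
                return null;
            }
            int space = stack.indexOf(' ', marker + 4); // skip the frame number
            if (space < 0)
            {
                return null;
            }
            int end = stack.indexOf('\n', space);       // end of the frame line
            return stack.substring(space + 1, end < 0 ? stack.length() : end);
        }

        public static void main(String[] args)
        {
            String sample = "HDF5-DIAG: Error detected in HDF5\n"
                    + "  #000: H5F.c line 1 in H5Fopen(): unable to open file\n"
                    + "    major: File accessibility";
            // Prints: H5F.c line 1 in H5Fopen(): unable to open file
            System.out.println(lastFrame(sample));
        }
    }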
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java
new file mode 100644
index 0000000..46632f6
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_IO</b>
+ */
+
+public class HDF5LowLevelIOException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5LowLevelIOException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5LowLevelIOException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java
new file mode 100644
index 0000000..2c7c133
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_CACHE</b>
+ */
+
+public class HDF5MetaDataCacheException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5MetaDataCacheException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5MetaDataCacheException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java
new file mode 100644
index 0000000..5dabcfe
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_OHDR</b>
+ */
+
+public class HDF5ObjectHeaderException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5ObjectHeaderException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5ObjectHeaderException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java
new file mode 100644
index 0000000..ed2abf1
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_PLIST</b>
+ */
+
+public class HDF5PropertyListInterfaceException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5PropertyListInterfaceException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5PropertyListInterfaceException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ReferenceException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ReferenceException.java
new file mode 100644
index 0000000..254d6be
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ReferenceException.java
@@ -0,0 +1,31 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+public class HDF5ReferenceException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5ReferenceException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5ReferenceException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java
new file mode 100644
index 0000000..da9b158
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                  *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_RESOURCE</b>
+ */
+
+public class HDF5ResourceUnavailableException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5ResourceUnavailableException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5ResourceUnavailableException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5SpaceRankMismatch.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5SpaceRankMismatch.java
new file mode 100644
index 0000000..378ad0e
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5SpaceRankMismatch.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2014 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * An exception for signaling that the data space of a data set has an unexpected rank.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5SpaceRankMismatch extends HDF5JavaException
+{
+    private static final long serialVersionUID = 1L;
+
+    private final int spaceRankExpected;
+
+    private final int spaceRankFound;
+
+    public HDF5SpaceRankMismatch(int spaceRankExpected, int spaceRankFound)
+    {
+        super("Data Set is expected to be of rank " + spaceRankExpected + " (rank="
+                + spaceRankFound + ")");
+        this.spaceRankExpected = spaceRankExpected;
+        this.spaceRankFound = spaceRankFound;
+    }
+
+    public int getSpaceRankExpected()
+    {
+        return spaceRankExpected;
+    }
+
+    public int getSpaceRankFound()
+    {
+        return spaceRankFound;
+    }
+
+}
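
A minimal usage sketch of the two getters, assuming a hypothetical helper readRank2Matrix() that throws this exception when the data set is not of rank 2:

    try
    {
        final float[][] matrix = readRank2Matrix(reader, "/Group1/MyDataSet");
    } catch (HDF5SpaceRankMismatch ex)
    {
        // Both ranks are available for error reporting or recovery.
        System.err.printf("Expected rank %d, found rank %d%n",
                ex.getSpaceRankExpected(), ex.getSpaceRankFound());
    }
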
diff --git a/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java
new file mode 100644
index 0000000..1bd09b2
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java
@@ -0,0 +1,37 @@
+/****************************************************************************
+ * NCSA HDF5                                                                 *
+ * National Computational Science Alliance                                  *
+ * University of Illinois at Urbana-Champaign                               *
+ * 605 E. Springfield, Champaign IL 61820                                   *
+ *                                                                          *
+ * For conditions of distribution and use, see the accompanying             *
+ * hdf-java/COPYING file.                                                   *
+ *                                                                          *
+ ****************************************************************************/
+
+package ncsa.hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5SymbolTableException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class of HDF5LibraryException represents the HDF-5 major error code <b>H5E_SYM</b>.
+ */
+
+public class HDF5SymbolTableException extends HDF5LibraryException
+{
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructs an <code>HDF5SymbolTableException</code> with the specified detail message.
+     * 
+     * @param majorErrorNumber The major error number of the HDF5 library.
+     * @param majorErrorMessage The error message for the major error number of the HDF5 library.
+     * @param minorErrorNumber The minor error number of the HDF5 library.
+     * @param minorErrorMessage The error message for the minor error number of the HDF5 library.
+     */
+    public HDF5SymbolTableException(final int majorErrorNumber, final String majorErrorMessage,
+            final int minorErrorNumber, final String minorErrorMessage)
+    {
+        super(majorErrorNumber, majorErrorMessage, minorErrorNumber, minorErrorMessage);
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5AC_cache_config_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5AC_cache_config_t.java
new file mode 100644
index 0000000..dc269a5
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5AC_cache_config_t.java
@@ -0,0 +1,97 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for H5Pget_mdc_config/H5Pset_mdc_config
+public class H5AC_cache_config_t implements Serializable {
+    private static final long serialVersionUID = -6748085696476149972L;
+    // general configuration fields:
+    public int              version;
+    public boolean          rpt_fcn_enabled;
+    public boolean          open_trace_file;
+    public boolean          close_trace_file;
+    public String           trace_file_name;
+    public boolean          evictions_enabled;
+    public boolean          set_initial_size;
+    public long             initial_size;
+    public double           min_clean_fraction;
+    public long             max_size;
+    public long             min_size;
+    public long             epoch_length;
+    // size increase control fields:
+    public int              incr_mode;  // H5C_cache_incr_mode
+    public double           lower_hr_threshold;
+    public double           increment;
+    public boolean          apply_max_increment;
+    public long             max_increment;
+    public int              flash_incr_mode;    // H5C_cache_flash_incr_mode 
+    public double           flash_multiple;
+    public double           flash_threshold;
+    // size decrease control fields:
+    public int              decr_mode;  // H5C_cache_decr_mode 
+    public double           upper_hr_threshold;
+    public double           decrement;
+    public boolean          apply_max_decrement;
+    public long             max_decrement;
+    public int              epochs_before_eviction;
+    public boolean          apply_empty_reserve;
+    public double           empty_reserve;
+    // parallel configuration fields: 
+    public int              dirty_bytes_threshold;
+    public int              metadata_write_strategy;
+    
+    public H5AC_cache_config_t (int version, boolean rpt_fcn_enabled, boolean open_trace_file, 
+            boolean close_trace_file, String trace_file_name, boolean evictions_enabled, 
+            boolean set_initial_size, long initial_size, double min_clean_fraction, long max_size, 
+            long min_size, long epoch_length, int incr_mode, double lower_hr_threshold, 
+            double increment, boolean apply_max_increment, long max_increment, int flash_incr_mode, 
+            double flash_multiple, double flash_threshold, int decr_mode, double upper_hr_threshold, 
+            double decrement, boolean apply_max_decrement, long max_decrement, 
+            int epochs_before_eviction, boolean apply_empty_reserve, double empty_reserve, 
+            int dirty_bytes_threshold, int metadata_write_strategy) 
+    {
+        this.version = version;
+        this.rpt_fcn_enabled = rpt_fcn_enabled;
+        this.open_trace_file = open_trace_file;
+        this.close_trace_file = close_trace_file;
+        this.trace_file_name = trace_file_name;
+        this.evictions_enabled = evictions_enabled;
+        this.set_initial_size = set_initial_size;
+        this.initial_size = initial_size;
+        this.min_clean_fraction = min_clean_fraction;
+        this.max_size = max_size;
+        this.min_size = min_size;
+        this.epoch_length = epoch_length;
+        this.incr_mode = incr_mode;
+        this.lower_hr_threshold = lower_hr_threshold;
+        this.increment = increment;
+        this.apply_max_increment = apply_max_increment;
+        this.max_increment = max_increment;
+        this.flash_incr_mode = flash_incr_mode;
+        this.flash_multiple = flash_multiple;
+        this.flash_threshold = flash_threshold;
+        this.decr_mode = decr_mode;
+        this.upper_hr_threshold = upper_hr_threshold;
+        this.decrement = decrement;
+        this.apply_max_decrement = apply_max_decrement;
+        this.max_decrement = max_decrement;
+        this.epochs_before_eviction = epochs_before_eviction;
+        this.apply_empty_reserve = apply_empty_reserve;
+        this.empty_reserve = empty_reserve;
+        this.dirty_bytes_threshold = dirty_bytes_threshold;
+        this.metadata_write_strategy = metadata_write_strategy;
+    }
+}
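
A hedged construction sketch: the field values below are illustrative assumptions, not recommended settings, and the consuming call H5.H5Pset_mdc_config on an open file access property list is assumed to be exposed by this binding:

    // Illustrative values only; consult the HDF5 metadata cache documentation
    // for settings appropriate to a given workload.
    final H5AC_cache_config_t config = new H5AC_cache_config_t(
            1,                                  // version (assumed)
            false, false, false, null,          // reporting and trace file off
            true,                               // evictions_enabled
            true, 4 * 1024 * 1024,              // set_initial_size, initial_size
            0.5, 16 * 1024 * 1024, 1024 * 1024, // min_clean_fraction, max_size, min_size
            50000,                              // epoch_length
            0, 0.9, 2.0, true, 4 * 1024 * 1024, // size increase control
            0, 1.0, 0.1,                        // flash size increase control
            0, 0.999, 0.9, true, 1024 * 1024,   // size decrease control
            3, true, 0.1,                       // eviction epochs, empty reserve
            256 * 1024, 0);                     // parallel configuration
    // H5.H5Pset_mdc_config(faplId, config);   // assuming the binding exposes it
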
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5A_info_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5A_info_t.java
new file mode 100644
index 0000000..6ebedde
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5A_info_t.java
@@ -0,0 +1,32 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for attribute (for H5Aget_info/H5Aget_info_by_idx/H5Aget_info_by_name)
+public class H5A_info_t implements Serializable {
+    private static final long serialVersionUID = 2791443594041667613L;
+    public boolean corder_valid; // Indicate if creation order is valid
+    public long corder; // Creation order of attribute
+    public int cset; // Character set of attribute name
+    public long data_size; // Size of raw data
+
+    H5A_info_t(boolean corder_valid, long corder, int cset, long data_size) {
+        this.corder_valid = corder_valid;
+        this.corder = corder;
+        this.cset = cset;
+        this.data_size = data_size;
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5G_info_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5G_info_t.java
new file mode 100644
index 0000000..d25cf71
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5G_info_t.java
@@ -0,0 +1,25 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for group (for H5Gget_info/H5Gget_info_by_name/H5Gget_info_by_idx)
+public class H5G_info_t implements Serializable {
+    private static final long serialVersionUID = -3746463015312132912L;
+    public int storage_type; // Type of storage for links in group
+    public long nlinks; // Number of links in group
+    public long max_corder; // Current max. creation order value for group
+    public boolean mounted; // Whether group has a file mounted on it
+}
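
Instances of these struct classes are populated by the JNI layer rather than constructed in user code. A minimal read-back sketch, assuming the binding exposes H5.H5Gget_info and that groupId is an open group identifier:

    final H5G_info_t info = H5.H5Gget_info(groupId);
    System.out.println("links=" + info.nlinks + ", max_corder=" + info.max_corder
            + ", mounted=" + info.mounted);
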
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5L_info_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5L_info_t.java
new file mode 100644
index 0000000..1f6101b
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5L_info_t.java
@@ -0,0 +1,36 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for link (for H5Lget_info/H5Lget_info_by_idx)
+public class H5L_info_t implements Serializable {
+    private static final long serialVersionUID = -4754320605310155033L;
+    public int     type;
+    public boolean corder_valid;
+    public long    corder;
+    public int     cset;
+    public long    address_val_size;
+    
+    H5L_info_t (int type, boolean corder_valid, long corder,
+        int cset, long address_val_size) 
+    {
+        this.type = type;
+        this.corder_valid = corder_valid;
+        this.corder = corder;
+        this.cset = cset;
+        this.address_val_size = address_val_size;
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t.java
new file mode 100644
index 0000000..0c3127a
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5O_hdr_info_t.java
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for object header metadata (for H5Oget_info/H5Oget_info_by_name/H5Oget_info_by_idx)
+public class H5O_hdr_info_t implements Serializable {
+    private static final long serialVersionUID = 7883826382952577189L;
+    public int version;       /* Version number of header format in file */
+    public int nmesgs;        /* Number of object header messages */
+    public int nchunks;       /* Number of object header chunks */
+    public int flags;         /* Object header status flags */
+    public long space_total;  /* Total space for storing object header in file */
+    public long space_meta;   /* Space within header for object header metadata information */
+    public long space_mesg;   /* Space within header for actual message information */
+    public long space_free;   /* Free space within object header */
+    public long mesg_present; /* Flags to indicate presence of message type in header */
+    public long mesg_shared;  /* Flags to indicate message type is shared in header */
+    
+    H5O_hdr_info_t (int version, int nmesgs, int nchunks, int flags, 
+        long space_total, long space_meta, long space_mesg, long space_free, 
+        long mesg_present, long mesg_shared) 
+    {
+        this.version = version;
+        this.nmesgs = nmesgs;
+        this.nchunks = nchunks;
+        this.flags = flags;
+        this.space_total = space_total;
+        this.space_meta = space_meta;
+        this.space_mesg = space_mesg;
+        this.space_free = space_free;
+        this.mesg_present = mesg_present;
+        this.mesg_shared = mesg_shared;
+    }
+}
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5O_info_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5O_info_t.java
new file mode 100644
index 0000000..6d48d8a
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5O_info_t.java
@@ -0,0 +1,52 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for object (for H5Oget_info/H5Oget_info_by_name/H5Oget_info_by_idx)
+public class H5O_info_t implements Serializable {
+    private static final long serialVersionUID = 4691681163544054518L;
+    public long   fileno;     /* File number that object is located in */
+    public long   addr;       /* Object address in file   */
+    public int    type;       /* Basic object type (group, dataset, etc.) */
+    public int    rc;         /* Reference count of object    */
+    public long   atime;      /* Access time          */
+    public long   mtime;      /* Modification time        */
+    public long   ctime;      /* Change time          */
+    public long   btime;      /* Birth time           */
+    public long   num_attrs;  /* # of attributes attached to object */
+    public H5O_hdr_info_t   hdr;            /* Object header information */
+    /* Extra metadata storage for obj & attributes */
+    public H5_ih_info_t     meta_size_obj;  /* v1/v2 B-tree & local/fractal heap for groups, B-tree for chunked datasets */
+    public H5_ih_info_t     meta_size_attr; /* v2 B-tree & heap for attributes */
+    
+    public H5O_info_t (long fileno, long addr, int type,
+        int rc, long num_attrs, long atime, long mtime, long ctime, long btime,
+        H5O_hdr_info_t hdr, H5_ih_info_t meta_size_obj, H5_ih_info_t meta_size_attr) 
+    {
+        this.fileno = fileno;
+        this.addr = addr;
+        this.type = type;
+        this.rc = rc;
+        this.num_attrs = num_attrs;
+        this.atime = atime;
+        this.mtime = mtime;
+        this.ctime = ctime;
+        this.btime = btime;
+        this.hdr = hdr;
+        this.meta_size_obj = meta_size_obj;
+        this.meta_size_attr = meta_size_attr;
+    }
+}
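
A minimal read-back sketch, assuming the binding exposes H5.H5Oget_info and that objectId refers to an open object (group, data set, or committed data type):

    final H5O_info_t info = H5.H5Oget_info(objectId);
    System.out.println("type=" + info.type + ", #attributes=" + info.num_attrs
            + ", header chunks=" + info.hdr.nchunks + ", attr metadata bytes="
            + (info.meta_size_attr.index_size + info.meta_size_attr.heap_size));
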
diff --git a/source/java/ncsa/hdf/hdf5lib/structs/H5_ih_info_t.java b/source/java/ncsa/hdf/hdf5lib/structs/H5_ih_info_t.java
new file mode 100644
index 0000000..0793e88
--- /dev/null
+++ b/source/java/ncsa/hdf/hdf5lib/structs/H5_ih_info_t.java
@@ -0,0 +1,29 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF Java Products. The full HDF Java copyright       *
+ * notice, including terms governing use, modification, and redistribution,  *
+ * is contained in the file, COPYING.  COPYING can be found at the root of   *
+ * the source code distribution tree. You can also access it online  at      *
+ * http://www.hdfgroup.org/products/licenses.html.  If you do not have       *
+ * access to the file, you may request a copy from help at hdfgroup.org.        *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package ncsa.hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for index and heap sizes (used for metadata size reporting, e.g. in H5O_info_t)
+public class H5_ih_info_t implements Serializable {
+    private static final long serialVersionUID = -142238015615462707L;
+    public long     index_size;     /* btree and/or list */
+    public long     heap_size;
+    
+    H5_ih_info_t (long index_size, long heap_size) 
+    {
+        this.index_size = index_size;
+        this.heap_size = heap_size;
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/BitSetConversionTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/BitSetConversionTest.java
new file mode 100644
index 0000000..4ffaf82
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/BitSetConversionTest.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertTrue;
+
+import java.util.Arrays;
+import java.util.BitSet;
+
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import ch.rinn.restrictions.Friend;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+/**
+ * Test cases for the BitSet conversion from / to storage form.
+ * 
+ * @author Bernd Rinn
+ */
+@Friend(toClasses = BitSetConversionUtils.class)
+public class BitSetConversionTest
+{
+    private BitSet create(final Integer... indices)
+    {
+        final BitSet bs = new BitSet();
+        for (final int index : indices)
+        {
+            bs.set(index);
+        }
+        return bs;
+    }
+
+    private BitSet[] create2D(final int length, final Integer... indices)
+    {
+        final BitSet[] bs = new BitSet[length];
+        for (int i = 0; i < bs.length; ++i)
+        {
+            bs[i] = new BitSet();
+            for (final int index : indices)
+            {
+                bs[i].set(index);
+            }
+        }
+        return bs;
+    }
+
+    @DataProvider
+    public Object[][] createBitSets()
+    {
+        final BitSet full4w = new BitSet();
+        full4w.set(0, 256);
+
+        return new Object[][]
+            {
+                { create() },
+                { create(0) },
+                { create(31) },
+                { create(64) },
+                { create(128) },
+                { create(63, 191) },
+                { create(64, 192) },
+                { create(17, 88, 155) },
+                { full4w }, };
+    }
+
+    @Test(dataProvider = "createBitSets")
+    public void testBitSetRoundTripGeneric(final BitSet bs)
+    {
+        final long[] bsArray = BitSetConversionUtils.toStorageFormGeneric(bs);
+        final BitSet bs2 = BitSetConversionUtils.fromStorageFormGeneric(bsArray, 0, bsArray.length);
+        assertEquals(bs, bs2);
+    }
+
+    @Test(dataProvider = "createBitSets")
+    public void testBitSetRoundTrip(final BitSet bs)
+    {
+        final long[] bsArray = BitSetConversionUtils.toStorageForm(bs);
+        final BitSet bs2 = BitSetConversionUtils.fromStorageForm(bsArray);
+        assertEquals(bs, bs2);
+    }
+
+    @DataProvider
+    public Object[][] createBitSetArrays()
+    {
+        final BitSet[] full4w = new BitSet[] { new BitSet(), new BitSet(), new BitSet(), };
+        full4w[0].set(0, 256);
+        full4w[1].set(0, 256);
+        full4w[2].set(0, 256);
+
+        return new Object[][]
+            {
+                { create2D(3) },
+                { create2D(3, 0) },
+                { create2D(3, 31) },
+                { create2D(3, 64) },
+                { create2D(3, 128) },
+                { create2D(3, 63, 191) },
+                { create2D(1, 64, 192) },
+                { create2D(2, 17, 88, 155) },
+                { full4w }, };
+    }
+
+    @Test(dataProvider = "createBitSetArrays")
+    public void testBitSetArrayRoundTrip(final BitSet[] bs)
+    {
+        final int maxLength = BitSetConversionUtils.getMaxLength(bs);
+        final long[] bsArray = BitSetConversionUtils.toStorageForm(bs, maxLength);
+        final BitSet[] bs2 = BitSetConversionUtils.fromStorageForm2D(new MDLongArray(bsArray, new int[] { maxLength, bs.length }));
+        assertTrue(Arrays.equals(bs, bs2));
+    }
+
+}
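
These conversion utilities back the public bit-field API; a hedged round-trip sketch through that API (file name illustrative; writeBitField/readBitField are assumed to be available as top-level methods in this version):

    final BitSet bs = new BitSet();
    bs.set(17);
    bs.set(155);
    final IHDF5Writer writer = HDF5FactoryProvider.get().open(new File("bits.h5"));
    writer.writeBitField("/myBits", bs);
    writer.close();
    final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(new File("bits.h5"));
    final BitSet read = reader.readBitField("/myBits"); // equals bs
    reader.close();
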
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/Dir2HDF5.java b/sourceTest/java/ch/systemsx/cisd/hdf5/Dir2HDF5.java
new file mode 100644
index 0000000..7337409
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/Dir2HDF5.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.lang.time.StopWatch;
+
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.h5ar.HDF5ArchiverFactory;
+import ch.systemsx.cisd.hdf5.h5ar.IArchiveEntryVisitor;
+import ch.systemsx.cisd.hdf5.h5ar.IHDF5Archiver;
+
+/**
+ * Creates an HDF5 archive file from a directory.
+ * 
+ * @author Bernd Rinn
+ */
+public class Dir2HDF5
+{
+
+    public static void main(String[] args) throws IOException
+    {
+        if (args.length == 0)
+        {
+            System.err.println("Syntax: Dir2HDF5 <hdf5 file> [<file or dir> ...]");
+            System.exit(1);
+        }
+        final File hdf5File = new File(args[0]);
+        final StopWatch watch = new StopWatch();
+        watch.start();
+        final IHDF5Archiver archiver =
+                HDF5ArchiverFactory.open(hdf5File, true, FileFormat.ALLOW_1_8, null);
+        if (args.length > 1)
+        {
+            for (int i = 1; i < args.length; ++i)
+            {
+                archiver.archiveFromFilesystem(new File(args[i]), IArchiveEntryVisitor.NONVERBOSE_VISITOR);
+            }
+        } else
+        {
+            archiver.archiveFromFilesystem(new File("."), IArchiveEntryVisitor.NONVERBOSE_VISITOR);
+        }
+        archiver.close();
+        watch.stop();
+        System.out.println("Creating hdf5 archive took " + watch);
+    }
+
+}
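
A hypothetical invocation (jar names and paths are illustrative):

    java -cp sis-jhdf5.jar:commons-lang.jar ch.systemsx.cisd.hdf5.Dir2HDF5 archive.h5 mydir
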
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF52Dir.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF52Dir.java
new file mode 100644
index 0000000..7c92cb8
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF52Dir.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.lang.time.StopWatch;
+
+import ch.systemsx.cisd.hdf5.h5ar.HDF5ArchiverFactory;
+import ch.systemsx.cisd.hdf5.h5ar.IHDF5ArchiveReader;
+import ch.systemsx.cisd.hdf5.h5ar.IArchiveEntryVisitor;
+
+/**
+ * @author Bernd Rinn
+ */
+public class HDF52Dir
+{
+
+    public static void main(String[] args) throws IOException
+    {
+        if (args.length != 1 && args.length != 2 && args.length != 3)
+        {
+            System.err.println("Syntax: HDF52Dir <hdf5 file> [<path in file>] [<root>]");
+            System.exit(1);
+        }
+        final File hdf5File = new File(args[0]);
+        final String pathInFile = (args.length > 1) ? args[1] : "/";
+        final File rootDir = new File((args.length > 2) ? args[2] : ".");
+        if (rootDir.isDirectory() == false)
+        {
+            System.err.println("Path '" + rootDir + "' is not a directory.");
+            System.exit(1);
+        }
+        final StopWatch watch = new StopWatch();
+        watch.start();
+        final IHDF5ArchiveReader reader = HDF5ArchiverFactory.openForReading(hdf5File);
+        reader.extractToFilesystem(rootDir, pathInFile, IArchiveEntryVisitor.NONVERBOSE_VISITOR);
+        reader.close();
+        watch.stop();
+        System.out.println("Extracting hdf5 file took " + watch);
+    }
+
+}
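
The matching extraction step, again with illustrative jar names and paths:

    java -cp sis-jhdf5.jar:commons-lang.jar ch.systemsx.cisd.hdf5.HDF52Dir archive.h5 / extracted-dir
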
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5ArrayTypeFloatWriter.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5ArrayTypeFloatWriter.java
new file mode 100644
index 0000000..72a13ee
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5ArrayTypeFloatWriter.java
@@ -0,0 +1,232 @@
+/*
+ * Copyright 2009 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5A.H5Aclose;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5A.H5Acreate;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5S.H5Sclose;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5S.H5Screate_simple;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F32BE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F32LE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F64BE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_DOUBLE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_FLOAT;
+
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * A writer for array type data sets.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5ArrayTypeFloatWriter
+{
+
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5ArrayTypeFloatWriter(HDF5Writer writer)
+    {
+        baseWriter = writer.getBaseWriter();
+    }
+
+    public void writeFloatArrayBigEndian(final String objectPath, final float[] data,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_IEEE_F32BE, new long[]
+                                { data.length }, 4, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void writeDoubleArrayBigEndian(final String objectPath, final double[] data,
+            final HDF5FloatStorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, H5T_IEEE_F64BE, new long[]
+                                { data.length }, 8, features, registry);
+                    H5Dwrite(dataSetId, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void writeFloatArrayArrayType(final String objectPath, final float[] data)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int memoryTypeId =
+                            baseWriter.h5.createArrayType(H5T_NATIVE_FLOAT, data.length, registry);
+                    final int storageTypeId =
+                            baseWriter.h5.createArrayType(H5T_IEEE_F32LE, data.length, registry);
+                    final int dataSetId =
+                            baseWriter.h5.createScalarDataSet(baseWriter.fileId, storageTypeId,
+                                    objectPath, true, registry);
+                    H5Dwrite(dataSetId, memoryTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void writeFloatArrayArrayType(final String objectPath, final MDFloatArray data)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int memoryTypeId =
+                            baseWriter.h5.createArrayType(H5T_NATIVE_FLOAT, data.dimensions(),
+                                    registry);
+                    final int storageTypeId =
+                            baseWriter.h5.createArrayType(H5T_IEEE_F32LE, data.dimensions(),
+                                    registry);
+                    final int dataSetId =
+                            baseWriter.h5.createScalarDataSet(baseWriter.fileId, storageTypeId,
+                                    objectPath, true, registry);
+                    H5Dwrite(dataSetId, memoryTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void writeFloat2DArrayArrayType1DSpace1d(final String objectPath, final MDFloatArray data)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert data.rank() == 2;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int memoryTypeId =
+                            baseWriter.h5.createArrayType(H5T_NATIVE_FLOAT, data.dimensions()[1],
+                                    registry);
+                    final int storageTypeId =
+                            baseWriter.h5.createArrayType(H5T_IEEE_F32LE, data.dimensions()[1], registry);
+                    final int dataSetId =
+                            baseWriter.h5.createDataSet(baseWriter.fileId, new long[]
+                                { data.dimensions()[0] }, null, storageTypeId,
+                                    HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS, objectPath,
+                                    HDF5StorageLayout.CONTIGUOUS, FileFormat.ALLOW_1_8, registry);
+                    H5Dwrite(dataSetId, memoryTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    public void setFloatArrayAttributeDimensional(final String objectPath, final String name,
+            final float[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = new long[]
+                        { value.length };
+                    final int dataSpaceId =
+                            H5Screate_simple(dimensions.length, dimensions, dimensions);
+                    registry.registerCleanUp(new Runnable()
+                        {
+                            @Override
+                            public void run()
+                            {
+                                H5Sclose(dataSpaceId);
+                            }
+                        });
+                    final int objectId =
+                            baseWriter.h5.openObject(baseWriter.fileId, objectPath, registry);
+                    final int attributeId =
+                            createAttribute(objectId, name, H5T_IEEE_F32LE, dataSpaceId, registry);
+                    baseWriter.h5.writeAttribute(attributeId, H5T_NATIVE_FLOAT,
+                            HDFNativeData.floatToByte(value));
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    private int createAttribute(int locationId, String attributeName, int dataTypeId,
+            int dataSpaceId, ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                H5Acreate(locationId, attributeName, dataTypeId, dataSpaceId, H5P_DEFAULT,
+                        H5P_DEFAULT);
+        registry.registerCleanUp(new Runnable()
+            {
+                @Override
+                public void run()
+                {
+                    H5Aclose(attributeId);
+                }
+            });
+        return attributeId;
+    }
+
+}
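
A usage sketch for the big-endian writer above; the constructor is package-private, so this assumes code living in package ch.systemsx.cisd.hdf5, and FLOAT_NO_COMPRESSION is assumed to be an available storage feature:

    final IHDF5Writer writer = HDF5FactoryProvider.get().open(new File("floats.h5"));
    final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
    // Stores the array big-endian on disk; H5Dwrite converts from native order.
    efWriter.writeFloatArrayBigEndian("/f32be", new float[] { 1f, 2f, 3f },
            HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION);
    writer.close();
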
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5Extract.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5Extract.java
new file mode 100644
index 0000000..c749c5b
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5Extract.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.time.StopWatch;
+
+/**
+ * @author Bernd Rinn
+ */
+public class HDF5Extract
+{
+
+    public static void main(String[] args) throws IOException
+    {
+        if (args.length != 2)
+        {
+            System.err.println("Syntax: HDF5Extract <hdf5 file> <file>");
+            System.exit(1);
+        }
+        final File hdf5File = new File(args[0]);
+        final File file = new File(args[1]);
+        final StopWatch watch = new StopWatch();
+        watch.start();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(hdf5File);
+        final byte[] data = reader.int8().readArray(file.getAbsolutePath());
+        FileUtils.writeByteArrayToFile(new File(file.getName()), data);
+        reader.close();
+        watch.stop();
+        System.out.println("Extracting hdf5 file took " + watch);
+    }
+
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5ReadTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5ReadTest.java
new file mode 100644
index 0000000..521290f
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5ReadTest.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.util.BitSet;
+import java.util.List;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * @author Bernd Rinn
+ */
+public class HDF5ReadTest
+{
+
+    public static void main(String[] args)
+    {
+        try
+        {
+            IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(new File("test.h5"));
+            System.out.println(reader.object().getGroupMemberPaths("/"));
+            describe(reader, "/Group1/MyBitSet", null);
+            describe(reader, "/Group1/MyDataSet", null);
+            byte[] bsb = reader.readAsByteArray("/Group1/MyBitSet");
+            long[] bsl = HDFNativeData.byteToLong(bsb, 0, bsb.length / NativeData.LONG_SIZE);
+            System.out.println("length of /Group1/MyBitSet=" + bsl.length);
+            for (long l : bsl)
+            {
+                System.out.print(l + " ");
+            }
+            System.out.println();
+            BitSet bs = reader.readBitField("/Group1/MyBitSet");
+            System.out.println(bs);
+            System.out.println(reader.float64().getAttr("/", "version"));
+            List<String> members = reader.object().getGroupMemberPaths("/Group1");
+            for (String m : members)
+            {
+                System.out.println("  " + m);
+            }
+            listAttributes(reader, "/Group1");
+            listAttributes(reader, "/Group1/MyDataSet");
+            describe(reader, "/Group1/MyDataSet", "foo");
+            describe(reader, "/Group1", "active");
+            System.out.println(reader.bool().getAttr("/Group1", "active"));
+            System.out.println(reader.string().getAttr("/Group1/MyDataSet", "foo"));
+            System.out.println(reader.string().getAttr("/Group1/SubGroup1/MyDataSet", "foo"));
+            System.out.println(reader.float64().readMatrix("/Group1/MyDataSet")[1][0]);
+            System.out.println(reader.float32().readMatrix("/Group1/SubGroup1/MyDataSet")[1][2]);
+            System.out.println(reader.string().read("/Group1/MyString").length());
+            listAttributes(reader, "empty");
+        } catch (HDF5LibraryException ex)
+        {
+            System.err.println(ex.getHDF5ErrorStackAsString());
+            ex.printStackTrace();
+        }
+    }
+
+    private static void listAttributes(IHDF5Reader reader, String objectName)
+    {
+        final List<String> attributeNames = reader.object().getAttributeNames(objectName);
+        System.out.printf("Found %d attributes for object '%s':\n", attributeNames.size(),
+                objectName);
+        for (String a : attributeNames)
+        {
+            System.out.println(a);
+        }
+    }
+
+    private static void describe(IHDF5Reader reader, String objectName, String attributeNameOrNull)
+    {
+        HDF5DataSetInformation dsInfo;
+        HDF5DataTypeInformation dtInfo;
+        if (attributeNameOrNull == null)
+        {
+            dsInfo = reader.getDataSetInformation(objectName);
+            dtInfo = dsInfo.getTypeInformation();
+        } else
+        {
+            dsInfo = null;
+            dtInfo = reader.object().getAttributeInformation(objectName, attributeNameOrNull);
+        }
+        System.out.printf("%s%s, class=%s, elemSize=%d", objectName,
+                attributeNameOrNull != null ? "#" + attributeNameOrNull : "",
+                dtInfo.getDataClass(), dtInfo.getElementSize());
+        if (dsInfo != null)
+        {
+            System.out.printf(", rank=%d, scalar=%s, variant=%s\n", dsInfo.getRank(), Boolean
+                    .toString(dsInfo.isScalar()), dsInfo.tryGetTypeVariant());
+            for (long dim : dsInfo.getDimensions())
+            {
+                System.out.println("  DIM " + dim);
+            }
+        } else
+        {
+            System.out.println();
+        }
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5RoundtripTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5RoundtripTest.java
new file mode 100644
index 0000000..e35904b
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5RoundtripTest.java
@@ -0,0 +1,11396 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.HDF5CompoundMemberMapping.mapping;
+import static ch.systemsx.cisd.hdf5.HDF5FloatStorageFeatures.FLOAT_CHUNKED;
+import static ch.systemsx.cisd.hdf5.HDF5FloatStorageFeatures.FLOAT_DEFLATE;
+import static ch.systemsx.cisd.hdf5.HDF5FloatStorageFeatures.FLOAT_SCALING1_DEFLATE;
+import static ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures.GENERIC_DEFLATE;
+import static ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures.GENERIC_DEFLATE_MAX;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_AUTO_SCALING;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_AUTO_SCALING_DEFLATE;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_CHUNKED;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_DEFLATE;
+import static ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures.INT_SHUFFLE_DEFLATE;
+import static ch.systemsx.cisd.hdf5.UnsignedIntUtils.toInt8;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_FLOAT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_INTEGER;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_REFERENCE;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ENUM;
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertFalse;
+import static org.testng.AssertJUnit.assertNotNull;
+import static org.testng.AssertJUnit.assertNull;
+import static org.testng.AssertJUnit.assertTrue;
+import static org.testng.AssertJUnit.fail;
+import static ch.systemsx.cisd.hdf5.UnsignedIntUtils.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Array;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5DatatypeInterfaceException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SymbolTableException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang.builder.ToStringBuilder;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDDoubleArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+import ch.systemsx.cisd.base.utilities.OSUtilities;
+import ch.systemsx.cisd.hdf5.HDF5CompoundMappingHints.EnumReturnType;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.FileFormat;
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.SyncMode;
+import ch.systemsx.cisd.hdf5.hdf5lib.H5General;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDFNativeData;
+
+/**
+ * Test cases for {@link IHDF5Writer} and {@link IHDF5Reader}, doing "round-trips" to the HDF5 disk
+ * format and back.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5RoundtripTest
+{
+
+    private static final File rootDirectory = new File("targets", "unit-test-wd");
+
+    private static final File workingDirectory = new File(rootDirectory, "hdf5-roundtrip-wd");
+
+    @BeforeSuite
+    public void init()
+    {
+        workingDirectory.mkdirs();
+        assertTrue(workingDirectory.isDirectory());
+        workingDirectory.deleteOnExit();
+        rootDirectory.deleteOnExit();
+    }
+
+    @Override
+    protected void finalize() throws Throwable
+    {
+        // Delete the working directory
+        if (workingDirectory.exists() && workingDirectory.canWrite())
+        {
+            workingDirectory.delete();
+        }
+        // Delete root directory
+        if (rootDirectory.exists() && rootDirectory.canWrite())
+        {
+            rootDirectory.delete();
+        }
+
+        super.finalize();
+    }
+
+    public static void main(String[] args) throws Throwable
+    {
+        // Print Java Version
+        System.out.println("Java Version: " + System.getProperty("java.version"));
+
+        // Print OS Version
+        System.out.println("Platform: " + OSUtilities.getComputerPlatform());
+
+        HDF5RoundtripTest test = new HDF5RoundtripTest();
+        test.init();
+
+        // Print Library Version
+        final int[] libversion = new int[3];
+        H5General.H5get_libversion(libversion);
+        System.out.println("HDF5 Version: " + libversion[0] + "." + libversion[1] + "."
+                + libversion[2]);
+
+        // Tests
+        test.testStrangeDataSetName();
+        test.testCreateSomeDeepGroup();
+        test.testGetGroupMembersIteratively();
+        test.testScalarValues();
+        test.testUnsignedInt8ValuesArray();
+        test.testUnsignedInt16ValuesArray();
+        test.testOverwriteScalar();
+        test.testOverwriteScalarKeepDataSet();
+        test.testDataSets();
+        test.testDataTypeInfoOptions();
+        test.testCompactDataset();
+        test.testCreateEmptyFixedSizeDataSets();
+        test.testCreateEmptyDefaultFixedSizeDataSets();
+        test.testCreateEmptyGrowableDataSets();
+        test.testCreateZeroLengthGrowableDataSets();
+        test.testExtendChunkedDataset();
+        test.testMaxPathLength();
+        test.testExceedMaxPathLength();
+        test.testAccessClosedReaderWriter();
+        test.testDataSetsNonExtendable();
+        test.testOverwriteContiguousDataSet();
+        test.testScaleOffsetFilterInt();
+        test.testScaleOffsetFilterFloat();
+        test.testBooleanArray();
+        test.testBooleanArrayBlock();
+        test.testBitFieldArray();
+        test.testBitFieldArrayBlockWise();
+        test.testSmallString();
+        test.testReadStringAttributeAsByteArray();
+        test.testReadStringAsByteArray();
+        test.testReadStringVLAsByteArray();
+        test.testStringAttributeFixedLength();
+        test.testStringAttributeFixedLengthExplicitlySaveLength();
+        test.testStringAttributeLength0();
+        test.testStringAttributeFixedLengthOverwriteWithShorter();
+        test.testStringAttributeUTF8FixedLength();
+        test.testStringArrayAttributeLengthFitsValue();
+        test.testStringArrayAttributeFixedLength();
+        test.testStringArrayAttributeUTF8LengthFitsValue();
+        test.testStringArrayAttributeUTF8FixedLength();
+        test.testStringMDArrayAttributeFixedLength();
+        test.testStringMDArrayAttributeUTF8LengthFitsValue();
+        test.testStringMDArrayAttributeUTF8FixedLength();
+        test.testVeryLargeString();
+        test.testOverwriteString();
+        test.testOverwriteStringWithLarge();
+        test.testOverwriteStringWithLargeKeepCompact();
+        test.testStringCompact();
+        test.testStringContiguous();
+        test.testStringUnicode();
+        test.testStringVariableLength();
+        test.testStringArray();
+        test.testStringArrayUTF8();
+        test.testStringArrayUTF8WithZeroChar();
+        test.testStringArrayWithNullStrings();
+        test.testStringMDArrayWithNullStrings();
+        test.testStringArrayBlock();
+        test.testStringArrayBlockCompact();
+        test.testStringArrayCompact();
+        test.testStringCompression();
+        test.testStringArrayCompression();
+        test.testStringVLArray();
+        test.testStringArrayBlockVL();
+        test.testStringArrayMD();
+        test.testStringArrayMDBlocks();
+        test.testStringMDArrayVL();
+        test.testStringMDArrayVLBlocks();
+        test.testMDIntArrayDifferentSizesElementType();
+        test.testMDIntArrayDifferentSizesElementTypeUnsignedByte();
+        test.testReadMDFloatArrayWithSlicing();
+        test.testReadToFloatMDArray();
+        test.testFloatArrayTypeDataSet();
+        test.testFloatArrayTypeDataSetOverwrite();
+        test.testFloatArrayCreateCompactOverwriteBlock();
+        test.testFloatMDArrayTypeDataSet();
+        test.testIterateOverFloatArrayInNaturalBlocks(10, 99);
+        test.testIterateOverFloatArrayInNaturalBlocks(10, 100);
+        test.testIterateOverFloatArrayInNaturalBlocks(10, 101);
+        test.testIterateOverStringArrayInNaturalBlocks(10, 99);
+        test.testIterateOverStringArrayInNaturalBlocks(10, 100);
+        test.testIterateOverStringArrayInNaturalBlocks(10, 101);
+        test.testReadToFloatMDArrayBlockWithOffset();
+        test.testReadToTimeDurationMDArrayBlockWithOffset();
+        test.testIterateOverMDFloatArrayInNaturalBlocks(new int[]
+            { 2, 2 }, new long[]
+            { 4, 3 }, new float[]
+            { 0f, 2f, 6f, 8f }, new int[][]
+            {
+                { 2, 2 },
+                { 2, 1 },
+                { 2, 2 },
+                { 2, 1 } });
+        test.testIterateOverMDFloatArrayInNaturalBlocks(new int[]
+            { 2, 2 }, new long[]
+            { 4, 4 }, new float[]
+            { 0f, 2f, 8f, 10f }, new int[][]
+            {
+                { 2, 2 },
+                { 2, 2 },
+                { 2, 2 },
+                { 2, 2 } });
+        test.testIterateOverMDFloatArrayInNaturalBlocks(new int[]
+            { 2, 2 }, new long[]
+            { 4, 5 }, new float[]
+            { 0f, 2f, 4f, 10f, 12f, 14f }, new int[][]
+            {
+                { 2, 2 },
+                { 2, 2 },
+                { 2, 1 },
+                { 2, 2 },
+                { 2, 2 },
+                { 2, 1 } });
+        test.testIterateOverMDFloatArrayInNaturalBlocks(new int[]
+            { 3, 2 }, new long[]
+            { 5, 4 }, new float[]
+            { 0f, 2f, 12f, 14f }, new int[][]
+            {
+                { 3, 2 },
+                { 3, 2 },
+                { 2, 2 },
+                { 2, 2 } });
+        test.testIterateOverMDFloatArrayInNaturalBlocks(new int[]
+            { 2, 2 }, new long[]
+            { 5, 4 }, new float[]
+            { 0f, 2f, 8f, 10f, 16f, 18f }, new int[][]
+            {
+                { 2, 2 },
+                { 2, 2 },
+                { 2, 2 },
+                { 2, 2 },
+                { 1, 2 },
+                { 1, 2 } });
+        test.testSetExtentBug();
+        test.testMDFloatArrayBlockWise();
+        test.testMDFloatArraySliced();
+        test.testMDFloatArrayBlockWiseWithMemoryOffset();
+        test.testDoubleArrayAsByteArray();
+        test.testCompressedDataSet();
+        test.testCreateEmptyFloatMatrix();
+        test.testFloatVectorLength1();
+        test.testFloatMatrixLength1();
+        test.testOneRowFloatMatrix();
+        test.testEmptyVectorDataSets();
+        test.testEmptyVectorDataSetsContiguous();
+        test.testEmptyVectorDataSetsCompact();
+        test.testEmptyMatrixDataSets();
+        test.testEmptyMatrixDataSetsContiguous();
+        test.testOverwriteVectorIncreaseSize();
+        test.testOverwriteMatrixIncreaseSize();
+        test.testOverwriteStringVectorDecreaseSize();
+        test.testAttributes();
+        test.testSimpleDataspaceAttributes();
+        test.testTimeStampAttributes();
+        test.testTimeDurationAttributes();
+        test.testTimeStampArrayAttributes();
+        test.testTimeDurationArrayAttributes();
+        test.testAttributeDimensionArray();
+        test.testAttributeDimensionArrayOverwrite();
+        test.testCreateDataTypes();
+        test.testGroups();
+        test.testDefaultHousekeepingFile();
+        test.testNonDefaultHousekeepingFile();
+        test.testHousekeepingFileSuffixNonPrintable();
+        test.testSoftLink();
+        test.testBrokenSoftLink();
+        test.testDeleteSoftLink();
+        test.testRenameLink();
+        try
+        {
+            test.testRenameLinkOverwriteFails();
+            System.err.println("testRenameLinkOverwriteFails(): failure not detected.");
+        } catch (HDF5SymbolTableException ex)
+        {
+            // Expected.
+        }
+        try
+        {
+            test.testRenameLinkSrcNonExistentFails();
+            System.err.println("testRenameLinkSrcNonExistentFails(): failure not detected.");
+        } catch (HDF5SymbolTableException ex)
+        {
+            // Expected.
+        }
+        test.testOverwriteKeepWithEmptyString();
+        test.testOverwriteKeepWithShorterString();
+        test.testOverwriteKeepWithLongerString();
+        test.testReplaceWithLongerString();
+        test.testNullOnGetSymbolicLinkTargetForNoLink();
+        test.testUpdateSoftLink();
+        test.testExternalLink();
+        test.testEnum();
+        test.testAnonymousEnum();
+        test.testJavaEnum();
+        test.testEnum16();
+        try
+        {
+            test.testConfusedEnum();
+            System.err.println("testConfusedEnum(): failure not detected.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Enum member index 0 of enum testEnum is 'ONE', but should be 'THREE'",
+                    ex.getMessage());
+        }
+        test.testReplaceConfusedEnum();
+        test.testEnumArray();
+        test.testEnumMDArray();
+        test.testEnumMDArrayBlockWise();
+        test.testJavaEnumArray();
+        test.testEnumArrayBlock();
+        test.testEnumArrayBlockScalingCompression();
+        test.testEnumArrayFromIntArray();
+        test.testEnumArray16BitFromIntArray();
+        test.testEnumArray16BitFromIntArrayScaled();
+        test.testEnumArray16BitFromIntArrayLarge();
+        test.testEnumArrayBlock16Bit();
+        test.testEnumArrayScaleCompression();
+        test.testOpaqueType();
+        test.testCompound();
+        test.testCompoundInferStringLength();
+        test.testCompoundVariableLengthString();
+        test.testCompoundVariableLengthStringUsingHints();
+        test.testCompoundReference();
+        test.testCompoundHintVLString();
+        test.testClosedCompoundType();
+        test.testAnonCompound();
+        test.testOverwriteCompound();
+        test.testOverwriteCompoundKeepType();
+        test.testCompoundJavaEnum();
+        test.testEnumFromCompoundJavaEnum();
+        test.testCompoundJavaEnumArray();
+        test.testCompoundJavaEnumMap();
+        test.testCompoundAttribute();
+        test.testCompoundAttributeMemoryAlignment();
+        test.testCompoundIncompleteJavaPojo();
+        test.testCompoundManualMapping();
+        test.testInferredCompoundType();
+        test.testInferredIncompletelyMappedCompoundType();
+        test.testNameChangeInCompoundMapping();
+        test.testInferredCompoundTypedWithEnum();
+        test.testInferredCompoundTypeWithEnumArray();
+        test.testCompoundMap();
+        test.testCompoundMapManualMapping();
+        test.testCompoundMapManualMappingWithConversion();
+        test.testDateCompound();
+        test.testMatrixCompound();
+        try
+        {
+            test.testMatrixCompoundSizeMismatch();
+            System.err.println("testMatrixCompoundSizeMismatch(): failure not detected.");
+        } catch (IllegalArgumentException ex)
+        {
+            // Expected
+        }
+        try
+        {
+            test.testMatrixCompoundDifferentNumberOfColumnsPerRow();
+            System.err
+                    .println("testMatrixCompoundDifferentNumberOfColumnsPerRow(): failure not detected.");
+        } catch (IllegalArgumentException ex)
+        {
+            // Expected
+        }
+        test.testCompoundOverflow();
+        test.testBitFieldCompound();
+        test.testCompoundMapArray();
+        test.testCompoundArray();
+        test.testCompoundArrayBlockWise();
+        test.testCompoundMapMDArray();
+        test.testCompoundMDArray();
+        test.testCompoundMDArrayManualMapping();
+        test.testCompoundMDArrayBlockWise();
+        test.testIterateOverMDCompoundArrayInNaturalBlocks();
+        test.testConfusedCompound();
+        test.testMDArrayCompound();
+        test.testMDArrayCompoundArray();
+        test.testGetGroupMemberInformation();
+        try
+        {
+            test.testGetLinkInformationFailed();
+            System.err.println("testGetObjectTypeFailed(): failure not detected.");
+        } catch (HDF5JavaException ex)
+        {
+            // Expected
+        }
+        test.testGetObjectType();
+        test.testHardLink();
+        test.testNullOnGetSymbolicLinkTargetForNoLink();
+        test.testReadByteArrayDataSetBlockWise();
+        test.testWriteByteArrayDataSetBlockWise();
+        test.testCreateByteArrayDataSetBlockSize0();
+        test.testCreateFloatArrayWithDifferentStorageLayouts();
+        test.testWriteByteArrayDataSetBlockWiseExtend();
+        test.testWriteByteMatrixDataSetBlockWise();
+        test.testWriteByteArrayDataSetBlockWiseMismatch();
+        test.testWriteByteMatrixDataSetBlockWiseMismatch();
+        test.testReadFloatMatrixDataSetBlockWise();
+        test.testWriteFloatMatrixDataSetBlockWise();
+        test.testWriteFloatMatrixDataSetBlockWiseWithOffset();
+        test.testReadMDFloatArrayAsByteArray();
+        test.testExtendContiguousDataset();
+        test.testAutomaticDeletionOfDataSetOnWrite();
+        test.testAutomaticDeletionOfDataSetOnCreate();
+        test.testTimestamps();
+        test.testTimestampArray();
+        test.testTimestampArrayChunked();
+        test.testTimeDurations();
+        test.testSmallTimeDurations();
+        test.testTimeDurationArray();
+        test.testTimeDurationMDArray();
+        test.testTimeDurationArrayChunked();
+        test.testNumericConversion();
+        test.testNumericConversionWithNumericConversionsSwitchedOff();
+        test.testSetDataSetSize();
+        test.testObjectReference();
+        test.testObjectReferenceArray();
+        test.testObjectReferenceOverwriteWithKeep();
+        test.testObjectReferenceOverwriteWithKeepOverridden();
+        test.testObjectReferenceArrayBlockWise();
+        test.testObjectReferenceAttribute();
+        test.testObjectReferenceArrayAttribute();
+        test.testObjectReferenceMDArrayAttribute();
+        test.testObjectReferenceMDArray();
+        test.testObjectReferenceMDArrayBlockWise();
+        test.testHDFJavaLowLevel();
+
+        test.finalize();
+    }
+
+    @Test
+    public void testStrangeDataSetName()
+    {
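+        // Data set names starting with \0 are legal HDF5 names; auto-dereferencing is
+        // switched off here, presumably because such names could otherwise be mistaken
+        // for in-file object references.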
+        final File file = new File(workingDirectory, "testStrangeDataSetName.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).noAutoDereference().writer();
+        writer.int32().write("\0\255", 15);
+        writer.int32().write("\0\254", 13);
+        writer.close();
+        final IHDF5Reader reader =
+                HDF5Factory.configureForReading(file).noAutoDereference().reader();
+        assertEquals(15, reader.int32().read("\0\255"));
+        assertEquals(13, reader.int32().read("\0\254"));
+        reader.close();
+    }
+
+    @Test
+    public void testCreateSomeDeepGroup()
+    {
+        final File datasetFile = new File(workingDirectory, "deepGroup.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(datasetFile).useUTF8CharacterEncoding()
+                        .writer();
+        final String groupName = "/some/deep/and/non/existing/group";
+        writer.object().createGroup(groupName);
+        assertTrue(writer.isGroup(groupName));
+        writer.close();
+    }
+
+    @Test
+    public void testGetGroupMembersIteratively()
+    {
+        final File datasetFile = new File(workingDirectory, "writereadwriteread.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String groupName = "/test/group/";
+        final String dset1Name = "dset1";
+        final String dset1Path = groupName + dset1Name;
+        final float[] dset1 = new float[]
+            { 1.3f, 2.4f, 3.6f };
+        writer.float32().writeArray(dset1Path, dset1);
+        final List<String> members1 = writer.getGroupMembers(groupName);
+        assertEquals(1, members1.size());
+        assertEquals(dset1Name, members1.get(0));
+        final String dset2Name = "dset2";
+        final String dset2Path = groupName + dset2Name;
+        final int[] dset2 = new int[]
+            { 1, 2, 3 };
+        writer.int32().writeArray(dset2Path, dset2);
+        final Set<String> members2 = new HashSet<String>(writer.getGroupMembers(groupName));
+        assertEquals(2, members2.size());
+        assertTrue(members2.contains(dset1Name));
+        assertTrue(members2.contains(dset2Name));
+        writer.close();
+    }
+
+    @Test
+    public void testOverwriteScalar()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteScalar.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(datasetFile);
+        writer.int32().write("a", 4);
+        assertEquals(HDF5DataClass.INTEGER, writer.getDataSetInformation("a").getTypeInformation()
+                .getDataClass());
+        assertTrue(writer.getDataSetInformation("a").isSigned());
+        writer.float32().write("a", 1e6f);
+        assertEquals(HDF5DataClass.FLOAT, writer.getDataSetInformation("a").getTypeInformation()
+                .getDataClass());
+        assertTrue(writer.getDataSetInformation("a").isSigned());
+        assertEquals(1e6f, writer.float32().read("a"));
+        writer.close();
+    }
+
+    @Test
+    public void testOverwriteScalarKeepDataSet()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteScalarKeepDataSet.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5Factory.configure(datasetFile).keepDataSetsIfTheyExist().writer();
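+        // With keepDataSetsIfTheyExist(), the second write below reuses the existing
+        // INTEGER data set: the float value is converted to the data set's type, so the
+        // data class stays INTEGER and 5 is read back.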
+        writer.int32().write("a", 4);
+        assertEquals(HDF5DataClass.INTEGER, writer.getDataSetInformation("a").getTypeInformation()
+                .getDataClass());
+        writer.float32().write("a", 5.1f);
+        assertEquals(HDF5DataClass.INTEGER, writer.getDataSetInformation("a").getTypeInformation()
+                .getDataClass());
+        assertEquals(5, writer.int32().read("a"));
+        writer.close();
+    }
+
+    @Test
+    public void testScalarValues()
+    {
+        final File datasetFile = new File(workingDirectory, "values.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String booleanDatasetName = "/boolean";
+        writer.writeBoolean(booleanDatasetName, true);
+        final String byteDatasetName = "/byte";
+        writer.int8().write(byteDatasetName, toInt8(17));
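+        // (byte) 1024 wraps to 0 in Java; the data set is unsigned, so 0 is read back below.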
+        final String unsignedByteOverflowDatasetName = "/ubyteOverflow";
+        writer.uint8().write(unsignedByteOverflowDatasetName, (byte) 1024);
+        final String shortDatasetName = "/short";
+        writer.int16().write(shortDatasetName, (short) 1000);
+        final String intDatasetName = "/int";
+        writer.int32().write(intDatasetName, 1000000);
+        final String longDatasetName = "/long";
+        writer.int64().write(longDatasetName, 10000000000L);
+        final String floatDatasetName = "/float";
+        writer.float32().write(floatDatasetName, 0.001f);
+        final String doubleDatasetName = "/double";
+        writer.float64().write(doubleDatasetName, 1.0E100);
+        final String stringDatasetName = "/string";
+        writer.string().write(stringDatasetName, "some string");
+        final String stringWithZeroDatasetName = "/stringWithZero";
+        writer.string().write(stringWithZeroDatasetName, "some string\0with zero");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(reader.readBoolean(booleanDatasetName));
+        assertEquals(0, reader.object().getRank(booleanDatasetName));
+        assertTrue(Arrays.equals(new long[0], reader.object().getDimensions(booleanDatasetName)));
+        assertEquals(17, reader.int8().read(byteDatasetName));
+        assertEquals(0, reader.object().getRank(byteDatasetName));
+        assertTrue(Arrays.equals(new long[0], reader.object().getDimensions(byteDatasetName)));
+        assertTrue(reader.getDataSetInformation(byteDatasetName).isSigned());
+        assertEquals(0, reader.int16().read(unsignedByteOverflowDatasetName));
+        assertFalse(reader.getDataSetInformation(unsignedByteOverflowDatasetName).isSigned());
+        assertEquals(1000, reader.int16().read(shortDatasetName));
+        assertEquals(1000000, reader.int32().read(intDatasetName));
+        assertEquals(10000000000L, reader.int64().read(longDatasetName));
+        assertEquals(0.001f, reader.float32().read(floatDatasetName));
+        assertEquals(0, reader.object().getRank(floatDatasetName));
+        assertTrue(Arrays.equals(new long[0], reader.object().getDimensions(floatDatasetName)));
+        assertEquals(1.0E100, reader.float64().read(doubleDatasetName));
+        assertEquals("some string", reader.string().read(stringDatasetName));
+        assertEquals("some string", reader.string().read(stringWithZeroDatasetName));
+        assertEquals("some string\0with zero", reader.string().readRaw(stringWithZeroDatasetName));
+        reader.close();
+    }
+
+    @Test
+    public void testUnsignedInt8ValuesArray()
+    {
+        final String byteDatasetName = "/byte";
+        final File datasetFile = new File(workingDirectory, "unsignedInt8Values.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final byte[] valuesWritten = new byte[]
+            { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, (byte) 128, (byte) 255 };
+        writer.uint8().writeArray(byteDatasetName, valuesWritten);
+        writer.uint8().setAttr(byteDatasetName, "attr", (byte) 224);
+        final byte[] valuesRead1 = writer.uint8().readArray(byteDatasetName);
+        assertTrue(Arrays.equals(valuesWritten, valuesRead1));
+        assertEquals(224, UnsignedIntUtils.toUint8(writer.uint8().getAttr(byteDatasetName, "attr")));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertFalse(reader.getDataSetInformation(byteDatasetName).isSigned());
+        final byte[] valuesRead2 = reader.uint8().readArray(byteDatasetName);
+        assertTrue(Arrays.equals(valuesWritten, valuesRead2));
+        assertEquals(224, UnsignedIntUtils.toUint8(reader.uint8().getAttr(byteDatasetName, "attr")));
+        reader.close();
+    }
+
+    @Test
+    public void testUnsignedInt16ValuesArray()
+    {
+        final String byteDatasetName = "/byte";
+        final File datasetFile = new File(workingDirectory, "unsignedInt16Values.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final short[] valuesWritten = new short[]
+            { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 256, 1024 };
+        writer.uint16().writeArray(byteDatasetName, valuesWritten);
+        writer.uint16().setAttr(byteDatasetName, "attr", (short) 60000);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertFalse(reader.getDataSetInformation(byteDatasetName).isSigned());
+        final short[] valuesRead = reader.uint16().readArray(byteDatasetName);
+        assertTrue(Arrays.equals(valuesWritten, valuesRead));
+        assertEquals(60000,
+                UnsignedIntUtils.toUint16(reader.uint16().getAttr(byteDatasetName, "attr")));
+        reader.close();
+    }
+
+    @Test
+    public void testReadMDFloatArrayWithSlicing()
+    {
+        final File datasetFile = new File(workingDirectory, "mdArray.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/floatMatrix";
+        final MDFloatArray arrayWritten = new MDFloatArray(new int[]
+            { 3, 2, 4 });
+        int count = 0;
+        for (int i = 0; i < arrayWritten.size(0); ++i)
+        {
+            for (int j = 0; j < arrayWritten.size(1); ++j)
+            {
+                for (int k = 0; k < arrayWritten.size(2); ++k)
+                {
+                    arrayWritten.set(++count, new int[]
+                        { i, j, k });
+                }
+            }
+        }
+        writer.float32().writeMDArray(floatDatasetName, arrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final MDFloatArray arrayRead = reader.float32().readMDArray(floatDatasetName);
+        assertEquals(arrayWritten, arrayRead);
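+        // A slice binds some dimensions to fixed indices: via an IndexMap, or via an index
+        // array where -1 marks a free (unbound) dimension.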
+        final IndexMap boundIndex1 = new IndexMap().bind(1, 0);
+        final long[] boundIndex1Arr = new long[]
+            { -1, 0, -1 };
+        final MDFloatArray slice1 = new MDFloatArray(new float[]
+            { 1f, 2f, 3f, 4f, 9f, 10f, 11f, 12f, 17f, 18f, 19f, 20f }, new int[]
+            { 3, 4 });
+        final MDFloatArray slice1BlockOfs00 = new MDFloatArray(new float[]
+            { 1f, 2f, 9f, 10f }, new int[]
+            { 2, 2 });
+        final MDFloatArray slice1BlockOfs01 = new MDFloatArray(new float[]
+            { 2f, 3f, 10f, 11f }, new int[]
+            { 2, 2 });
+        final MDFloatArray slice1BlockOfs10 = new MDFloatArray(new float[]
+            { 9f, 10f, 17f, 18f }, new int[]
+            { 2, 2 });
+        final MDFloatArray slice1BlockOfs11 = new MDFloatArray(new float[]
+            { 10f, 11f, 18f, 19f }, new int[]
+            { 2, 2 });
+        final IndexMap boundIndex2 = new IndexMap().bind(2, 3).bind(0, 1);
+        final long[] boundIndex2Arr = new long[]
+            { 1, -1, 3 };
+        final MDFloatArray slice2 = new MDFloatArray(new float[]
+            { 12f, 16f }, new int[]
+            { 2 });
+        assertEquals(slice1, reader.float32().readMDArraySlice(floatDatasetName, boundIndex1));
+        assertEquals(slice1, reader.float32().readMDArraySlice(floatDatasetName, boundIndex1Arr));
+        assertEquals(slice1BlockOfs00,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 0, 0 }, boundIndex1));
+        assertEquals(slice1BlockOfs00,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 0, 0 }, boundIndex1Arr));
+        assertEquals(slice1BlockOfs01,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 0, 1 }, boundIndex1));
+        assertEquals(slice1BlockOfs01,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 0, 1 }, boundIndex1Arr));
+        assertEquals(slice1BlockOfs10,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 1, 0 }, boundIndex1));
+        assertEquals(slice1BlockOfs10,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 1, 0 }, boundIndex1Arr));
+        assertEquals(slice1BlockOfs11,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 1, 1 }, boundIndex1));
+        assertEquals(slice1BlockOfs11,
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName, new int[]
+                    { 2, 2 }, new long[]
+                    { 1, 1 }, boundIndex1Arr));
+        assertEquals(slice2, reader.float32().readMDArraySlice(floatDatasetName, boundIndex2));
+        assertEquals(slice2, reader.float32().readMDArraySlice(floatDatasetName, boundIndex2Arr));
+        reader.close();
+    }
+
+    @Test
+    public void testBooleanArray()
+    {
+        final File datasetFile = new File(workingDirectory, "booleanArray.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String booleanDatasetName = "/booleanArray";
+        final String longArrayDataSetName = "/longArray";
+        final BitSet arrayWritten = new BitSet();
+        arrayWritten.set(32);
+        writer.writeBitField(booleanDatasetName, arrayWritten);
+        writer.int64().writeArray(longArrayDataSetName,
+                BitSetConversionUtils.toStorageForm(arrayWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final BitSet arrayRead = reader.readBitField(booleanDatasetName);
+        try
+        {
+            reader.readBitField(longArrayDataSetName);
+            fail("Failed to detect type mismatch.");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            // Expected, as the types do not match.
+        }
+        assertEquals(arrayWritten, arrayRead);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(booleanDatasetName);
+        assertEquals(HDF5DataClass.BITFIELD, info.getTypeInformation().getDataClass());
+        assertChunkSizes(info, HDF5Utils.MIN_CHUNK_SIZE);
+        reader.close();
+    }
+
+    @Test
+    public void testBooleanArrayBlock()
+    {
+        final File datasetFile = new File(workingDirectory, "booleanArrayBlock.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String booleanDatasetName = "/booleanArray";
+        final BitSet arrayWritten = new BitSet();
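+        // Bit field blocks are counted in 64-bit words: with a block size of 2 words,
+        // bit 0 written to block 1 shows up as overall bit 128 (asserted below).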
+        writer.bool().createBitField(booleanDatasetName, 4L, 2);
+        arrayWritten.set(32);
+        arrayWritten.set(40);
+        writer.bool().writeBitFieldBlock(booleanDatasetName, arrayWritten, 2, 0);
+        arrayWritten.clear();
+        arrayWritten.set(0);
+        writer.bool().writeBitFieldBlock(booleanDatasetName, arrayWritten, 2, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final BitSet arrayBlockRead = reader.bool().readBitFieldBlock(booleanDatasetName, 2, 1);
+        assertEquals(1, arrayBlockRead.cardinality());
+        assertTrue(arrayBlockRead.get(0));
+        assertTrue(reader.bool().isBitSet(booleanDatasetName, 32));
+        assertTrue(reader.bool().isBitSet(booleanDatasetName, 40));
+        assertTrue(reader.bool().isBitSet(booleanDatasetName, 128));
+        assertFalse(reader.bool().isBitSet(booleanDatasetName, 33));
+        assertFalse(reader.bool().isBitSet(booleanDatasetName, 64));
+        assertFalse(reader.bool().isBitSet(booleanDatasetName, 256));
+        assertFalse(reader.bool().isBitSet(booleanDatasetName, 512));
+        reader.close();
+    }
+
+    @Test
+    public void testBitFieldArray()
+    {
+        final File datasetFile = new File(workingDirectory, "bitFieldArray.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String booleanDatasetName = "/bitFieldArray";
+        final BitSet[] arrayWritten = new BitSet[]
+            { new BitSet(), new BitSet(), new BitSet() };
+        arrayWritten[0].set(32);
+        arrayWritten[1].set(40);
+        arrayWritten[2].set(17);
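+        // Bit field arrays are stored rank-2 as { words per bit field, number of bit fields };
+        // all three BitSets fit into one 64-bit word, hence the { 1, 3 } dimensions read back.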
+        writer.bool().writeBitFieldArray(booleanDatasetName, arrayWritten, INT_AUTO_SCALING);
+        final String bigBooleanDatasetName = "/bigBitFieldArray";
+        final BitSet[] bigArrayWritten = new BitSet[]
+            { new BitSet(), new BitSet(), new BitSet() };
+        bigArrayWritten[0].set(32);
+        bigArrayWritten[1].set(126);
+        bigArrayWritten[2].set(17);
+        bigArrayWritten[2].set(190);
+        writer.bool().writeBitFieldArray(bigBooleanDatasetName, bigArrayWritten, INT_AUTO_SCALING);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(
+                Arrays.toString(reader.object().getDataSetInformation(booleanDatasetName)
+                        .getDimensions()),
+                Arrays.equals(new long[]
+                    { 1, 3 }, reader.object().getDataSetInformation(booleanDatasetName)
+                        .getDimensions()));
+        assertEquals(HDF5DataClass.BITFIELD,
+                reader.object().getDataSetInformation(booleanDatasetName).getTypeInformation()
+                        .getDataClass());
+        final BitSet[] arrayRead = reader.bool().readBitFieldArray(booleanDatasetName);
+        assertEquals(3, arrayRead.length);
+        assertEquals(1, arrayRead[0].cardinality());
+        assertTrue(arrayRead[0].get(32));
+        assertEquals(1, arrayRead[1].cardinality());
+        assertTrue(arrayRead[1].get(40));
+        assertEquals(1, arrayRead[2].cardinality());
+        assertTrue(arrayRead[2].get(17));
+
+        assertEquals(HDF5DataClass.BITFIELD,
+                reader.object().getDataSetInformation(bigBooleanDatasetName).getTypeInformation()
+                        .getDataClass());
+        final BitSet[] bigArrayRead = reader.bool().readBitFieldArray(bigBooleanDatasetName);
+        assertEquals(3, bigArrayRead.length);
+        assertEquals(1, bigArrayRead[0].cardinality());
+        assertTrue(bigArrayRead[0].get(32));
+        assertEquals(1, bigArrayRead[1].cardinality());
+        assertTrue(bigArrayRead[1].get(126));
+        assertEquals(2, bigArrayRead[2].cardinality());
+        assertTrue(bigArrayRead[2].get(17));
+        assertTrue(bigArrayRead[2].get(190));
+        reader.close();
+    }
+
+    @Test
+    public void testBitFieldArrayBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "bitFieldArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String booleanDatasetName = "/bitFieldArray";
+        final BitSet[] arrayWritten = new BitSet[]
+            { new BitSet(), new BitSet(), new BitSet(), new BitSet() };
+        arrayWritten[0].set(0);
+        arrayWritten[1].set(1);
+        arrayWritten[2].set(2);
+        arrayWritten[3].set(3);
+        final int count = 100;
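+        // Create room for 'count' blocks of arrayWritten.length bit fields each, then
+        // write the same block 'count' times and verify each block on reading.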
+        writer.bool().createBitFieldArray(booleanDatasetName, 4, count * arrayWritten.length,
+                INT_AUTO_SCALING);
+        for (int i = 0; i < count; ++i)
+        {
+            writer.bool().writeBitFieldArrayBlock(booleanDatasetName, arrayWritten, i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info =
+                reader.object().getDataSetInformation(booleanDatasetName);
+        assertEquals(HDF5DataClass.BITFIELD, info.getTypeInformation().getDataClass());
+        assertEquals(2, info.getDimensions().length);
+        assertEquals(1, info.getDimensions()[0]);
+        assertEquals(count * arrayWritten.length, info.getDimensions()[1]);
+        for (int i = 0; i < count; ++i)
+        {
+            assertTrue(
+                    "Block " + i,
+                    Arrays.equals(arrayWritten,
+                            reader.bool().readBitFieldArrayBlock(booleanDatasetName, 4, i)));
+        }
+        reader.close();
+    }
+
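+    /**
+     * Asserts that <var>info</var> describes a CHUNKED data set with the given
+     * per-dimension chunk sizes.
+     */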
+    private void assertChunkSizes(final HDF5DataSetInformation info,
+            final long... expectedChunkSize)
+    {
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        final int[] chunkSize = info.tryGetChunkSizes();
+        assertNotNull(chunkSize);
+        assertEquals(expectedChunkSize.length, chunkSize.length);
+        for (int i = 0; i < expectedChunkSize.length; ++i)
+        {
+            assertEquals(Integer.toString(i), expectedChunkSize[i], chunkSize[i]);
+        }
+    }
+
+    @Test
+    public void testMDFloatArrayBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "mdArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/floatMatrix";
+        final String floatDatasetName2 = "/floatMatrix2";
+        final long[] shape = new long[]
+            { 10, 10, 10 };
+        final int[] blockShape = new int[]
+            { 5, 5, 5 };
+        writer.float32().createMDArray(floatDatasetName, shape, blockShape);
+        writer.float32().createMDArray(floatDatasetName2, shape, blockShape);
+        final float[] flatArray = new float[MDArray.getLength(blockShape)];
+        for (int i = 0; i < flatArray.length; ++i)
+        {
+            flatArray[i] = i;
+        }
+        final MDFloatArray arrayBlockWritten = new MDFloatArray(flatArray, blockShape);
+        for (int i = 0; i < 2; ++i)
+        {
+            for (int j = 0; j < 2; ++j)
+            {
+                for (int k = 0; k < 2; ++k)
+                {
+                    final long[] blockIndex = new long[]
+                        { i, j, k };
+                    writer.float32().writeMDArrayBlock(floatDatasetName, arrayBlockWritten,
+                            blockIndex);
+                    writer.float32().writeMDArrayBlock(floatDatasetName2, arrayBlockWritten,
+                            blockIndex);
+                }
+            }
+        }
+
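+        // Overwrite parts of two blocks through 2-D slices of the 3-D data set: imap1 pins
+        // dimension 1 to index 7, imap2 pins dimension 0 to index 9.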
+        final MDFloatArray arraySliceWritten1 = new MDFloatArray(new float[]
+            { 1000f, 2000f, 3000f, 4000f, 5000f }, new int[]
+            { 1, 5 });
+        final long[] slicedBlock1 = new long[]
+            { 4, 1 };
+        final IndexMap imap1 = new IndexMap().bind(1, 7);
+        writer.float32().writeSlicedMDArrayBlock(floatDatasetName2, arraySliceWritten1,
+                slicedBlock1, imap1);
+
+        final MDFloatArray arraySliceWritten2 = new MDFloatArray(new float[]
+            { -1f, -2f, -3f, -4f, -5f, -6f }, new int[]
+            { 3, 2 });
+        final long[] slicedBlockOffs2 = new long[]
+            { 2, 6 };
+        final IndexMap imap2 = new IndexMap().bind(0, 9);
+        writer.float32().writeSlicedMDArrayBlockWithOffset(floatDatasetName2, arraySliceWritten2,
+                slicedBlockOffs2, imap2);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        for (int i = 0; i < 2; ++i)
+        {
+            for (int j = 0; j < 2; ++j)
+            {
+                for (int k = 0; k < 2; ++k)
+                {
+                    final long[] blockIndex = new long[]
+                        { i, j, k };
+                    final MDFloatArray arrayRead =
+                            reader.float32().readMDArrayBlock(floatDatasetName, blockShape,
+                                    blockIndex);
+                    assertEquals(Arrays.toString(blockIndex), arrayBlockWritten, arrayRead);
+                    // { 0, 1, 1 } is the first block we overwrote, { 1, 0, 1 } the second one.
+                    if (false == Arrays.equals(new long[]
+                        { 0, 1, 1 }, blockIndex) && false == Arrays.equals(new long[]
+                        { 1, 0, 1 }, blockIndex))
+                    {
+                        assertEquals(
+                                Arrays.toString(blockIndex),
+                                arrayBlockWritten,
+                                reader.float32().readMDArrayBlock(floatDatasetName2, blockShape,
+                                        blockIndex));
+                    }
+                }
+            }
+        }
+        final MDFloatArray arraySliceRead1 =
+                reader.float32().readSlicedMDArrayBlock(floatDatasetName2,
+                        arraySliceWritten1.dimensions(), slicedBlock1, imap1);
+        assertEquals(arraySliceWritten1, arraySliceRead1);
+        final MDFloatArray arraySliceRead2 =
+                reader.float32().readSlicedMDArrayBlockWithOffset(floatDatasetName2,
+                        arraySliceWritten2.dimensions(), slicedBlockOffs2, imap2);
+        assertEquals(arraySliceWritten2, arraySliceRead2);
+        reader.close();
+    }
+
+    @Test
+    public void testMDFloatArraySliced()
+    {
+        final File datasetFile = new File(workingDirectory, "mdArraySliced.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/floatMatrix";
+        final long[] shape = new long[]
+            { 10, 10, 10 };
+        final int[] sliceShape = new int[]
+            { 10, 10 };
+        final int[] sliceBlockShape = new int[]
+            { 1, 10, 10 };
+        writer.float32().createMDArray(floatDatasetName, shape, sliceBlockShape);
+        final float[] baseArray = new float[MDArray.getLength(sliceShape)];
+        for (int i = 0; i < baseArray.length; ++i)
+        {
+            baseArray[i] = i;
+        }
+        float[] floatArrayWritten = baseArray.clone();
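+        // -1 marks the free dimensions: each pass writes one 10x10 slice at index i of
+        // dimension 0. As timesTwo() doubles the array in place, every slice differs.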
+        for (int i = 0; i < 10; ++i)
+        {
+            writer.float32().writeMDArraySlice(floatDatasetName,
+                    new MDFloatArray(timesTwo(floatArrayWritten), sliceShape), new long[]
+                        { i, -1, -1 });
+        }
+        writer.close();
+
+        floatArrayWritten = baseArray.clone();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        for (int i = 0; i < 10; ++i)
+        {
+            final MDFloatArray arrayRead =
+                    reader.float32().readMDArraySlice(floatDatasetName, new long[]
+                        { i, -1, -1 });
+            assertEquals(Integer.toString(i), new MDFloatArray(timesTwo(floatArrayWritten),
+                    sliceShape), arrayRead);
+        }
+        reader.close();
+    }
+
+    private static float[] timesTwo(float[] array)
+    {
+        for (int i = 0; i < array.length; ++i)
+        {
+            array[i] *= 2;
+        }
+        return array;
+    }
+
+    @Test
+    public void testMDFloatArrayBlockWiseWithMemoryOffset()
+    {
+        final File datasetFile = new File(workingDirectory, "mdArrayBlockWiseWithMemoryOffset.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/floatMatrix";
+        final long[] shape = new long[]
+            { 10, 10 };
+        writer.float32().createMDArray(floatDatasetName, shape, MDArray.toInt(shape));
+        final float[] flatArray = new float[MDArray.getLength(shape)];
+        for (int i = 0; i < flatArray.length; ++i)
+        {
+            flatArray[i] = i;
+        }
+        final MDFloatArray arrayBlockWritten = new MDFloatArray(flatArray, shape);
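+        // writeMDArrayBlockWithOffset(name, data, blockDimensions, offset, memoryOffset)
+        // copies a window of the in-memory array to the given offset in the file; e.g. the
+        // 2x2 window at memory offset { 1, 3 } holds the values 13, 14, 23 and 24.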
+        writer.float32().writeMDArrayBlockWithOffset(floatDatasetName, arrayBlockWritten, new int[]
+            { 2, 2 }, new long[]
+            { 0, 0 }, new int[]
+            { 1, 3 });
+        writer.float32().writeMDArrayBlockWithOffset(floatDatasetName, arrayBlockWritten, new int[]
+            { 2, 2 }, new long[]
+            { 2, 2 }, new int[]
+            { 5, 1 });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[][] matrixRead = reader.float32().readMatrix(floatDatasetName);
+        reader.close();
+        assertEquals(13f, matrixRead[0][0]);
+        assertEquals(14f, matrixRead[0][1]);
+        assertEquals(23f, matrixRead[1][0]);
+        assertEquals(24f, matrixRead[1][1]);
+        assertEquals(51f, matrixRead[2][2]);
+        assertEquals(52f, matrixRead[2][3]);
+        assertEquals(61f, matrixRead[3][2]);
+        assertEquals(62f, matrixRead[3][3]);
+        for (int i = 0; i < 10; ++i)
+        {
+            for (int j = 0; j < 10; ++j)
+            {
+                if ((i < 2 && j < 2) || (i > 1 && i < 4 && j > 1 && j < 4))
+                {
+                    continue;
+                }
+                assertEquals("(" + i + "," + j + "}", 0f, matrixRead[i][j]);
+            }
+        }
+    }
+
+    @Test
+    public void testStringVariableLength()
+    {
+        final File datasetFile = new File(workingDirectory, "stringVariableLength.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
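+        // Variable-length strings keep embedded \0 bytes on reading, unlike fixed-length
+        // strings, where read() stops at the first \0 (cf. testScalarValues()).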
+        writer.string().writeVL("a", "");
+        writer.string().writeVL("b", "\0");
+        writer.string().writeVL("c", "\0ABC\0");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals("", reader.readString("a"));
+        assertEquals("\0", reader.readString("b"));
+        assertEquals("\0ABC\0", reader.readString("c"));
+        reader.close();
+    }
+
+    @Test
+    public void testDataSets()
+    {
+        final File datasetFile = new File(workingDirectory, "datasets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/Group1/floats";
+        final float[] floatDataWritten = new float[]
+            { 2.8f, 8.2f, -3.1f, 0.0f, 10000.0f };
+        writer.float32().writeArray(floatDatasetName, floatDataWritten);
+        final long[] longDataWritten = new long[]
+            { 10, -1000000, 1, 0, 100000000000L };
+        final String longDatasetName = "/Group2/longs";
+        writer.int64().writeArray(longDatasetName, longDataWritten);
+        final byte[] byteDataWritten = new byte[]
+            { 0, -1, 1, -128, 127 };
+        final String byteDatasetName = "/Group2/bytes";
+        writer.int8().writeArray(byteDatasetName, byteDataWritten, INT_DEFLATE);
+        final short[] shortDataWritten = new short[]
+            { 0, -1, 1, -128, 127 };
+        final String shortDatasetName = "/Group2/shorts";
+        writer.int16().writeArray(shortDatasetName, shortDataWritten, INT_DEFLATE);
+        final String intDatasetName1 = "/Group2/ints1";
+        final int[] intDataWritten = new int[]
+            { 0, 1, 2, 3, 4 };
+        final String intDatasetName2 = "/Group2/ints2";
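+        // INT_SHUFFLE_DEFLATE byte-transposes the values before deflating, which typically
+        // improves the compression ratio; the data read back must be identical either way.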
+        writer.int32().writeArray(intDatasetName1, intDataWritten, INT_DEFLATE);
+        writer.int32().writeArray(intDatasetName2, intDataWritten, INT_SHUFFLE_DEFLATE);
+        writer.file().flush();
+        final String stringDataWritten1 = "Some Random String";
+        final String stringDataWritten2 = "Another Random String";
+        final String stringDatasetName = "/Group3/strings";
+        final String stringDatasetName2 = "/Group4/strings";
+        writer.string().write(stringDatasetName, stringDataWritten1);
+        writer.string().writeVL(stringDatasetName2, stringDataWritten1);
+        writer.string().writeVL(stringDatasetName2, stringDataWritten2);
+        final String stringDatasetName3 = "/Group4/stringArray";
+        writer.string().writeArrayVL(stringDatasetName3, new String[]
+            { stringDataWritten1, stringDataWritten2 });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[] floatDataRead = reader.float32().readArray(floatDatasetName);
+        assertTrue(Arrays.equals(floatDataWritten, floatDataRead));
+        assertEquals(1, reader.object().getRank(floatDatasetName));
+        assertTrue(Arrays.equals(new long[]
+            { floatDataWritten.length }, reader.object().getDimensions(floatDatasetName)));
+        final long[] longDataRead = reader.int64().readArray(longDatasetName);
+        assertTrue(Arrays.equals(longDataWritten, longDataRead));
+        final byte[] byteDataRead = reader.int8().readArray(byteDatasetName);
+        assertTrue(Arrays.equals(byteDataWritten, byteDataRead));
+        final short[] shortDataRead = reader.int16().readArray(shortDatasetName);
+        assertTrue(Arrays.equals(shortDataWritten, shortDataRead));
+        final String stringDataRead1 = reader.string().read(stringDatasetName);
+        final int[] intDataRead1 = reader.int32().readArray(intDatasetName1);
+        assertTrue(Arrays.equals(intDataWritten, intDataRead1));
+        final int[] intDataRead2 = reader.int32().readArray(intDatasetName2);
+        assertTrue(Arrays.equals(intDataWritten, intDataRead2));
+        assertEquals(stringDataWritten1, stringDataRead1);
+        final String stringDataRead2 = reader.string().read(stringDatasetName2);
+        assertEquals(stringDataWritten2, stringDataRead2);
+        final String[] vlStringArrayRead = reader.string().readArray(stringDatasetName3);
+        assertEquals(stringDataWritten1, vlStringArrayRead[0]);
+        assertEquals(stringDataWritten2, vlStringArrayRead[1]);
+        reader.close();
+    }
+
+    @Test
+    public void testScaleOffsetFilterInt()
+    {
+        final File datasetFile = new File(workingDirectory, "scaleoffsetfilterint.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final int[] intWritten = new int[1000000];
+        for (int i = 0; i < intWritten.length; ++i)
+        {
+            intWritten[i] = (i % 4);
+        }
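+        // Values 0..3 fit into 2 bits; INT_AUTO_SCALING_DEFLATE lets the scale-offset
+        // filter choose such a minimal bit width before deflating.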
+        writer.int32().writeArray("ds", intWritten, INT_AUTO_SCALING_DEFLATE);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final int[] intRead = reader.int32().readArray("ds");
+        assertTrue(Arrays.equals(intRead, intWritten));
+        reader.close();
+
+        // The scale-offset filter requires HDF5 1.8; it shouldn't work in strict HDF5 1.6 mode.
+        final File file2 = new File(workingDirectory, "scaleoffsetfilterintfailed.h5");
+        file2.delete();
+        assertFalse(file2.exists());
+        file2.deleteOnExit();
+        final IHDF5Writer writer2 =
+                HDF5FactoryProvider.get().configure(file2).fileFormat(FileFormat.STRICTLY_1_6)
+                        .writer();
+        try
+        {
+            writer2.int32().writeArray("ds", intWritten, INT_AUTO_SCALING_DEFLATE);
+            fail("Usage of scaling compression in strict HDF5 1.6 mode not detected");
+        } catch (IllegalStateException ex)
+        {
+            assertTrue(ex.getMessage().indexOf("not allowed") >= 0);
+        }
+        writer2.close();
+    }
+
+    @Test
+    public void testScaleOffsetFilterFloat()
+    {
+        final File datasetFile = new File(workingDirectory, "scaleoffsetfilterfloat.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final float[] floatWritten = new float[1000000];
+        for (int i = 0; i < floatWritten.length; ++i)
+        {
+            floatWritten[i] = (i % 10) / 10f;
+        }
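+        // FLOAT_SCALING1_DEFLATE keeps one decimal digit of precision; as all values are
+        // multiples of 0.1, the round trip below is lossless.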
+        writer.float32().writeArray("ds", floatWritten, FLOAT_SCALING1_DEFLATE);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[] floatRead = reader.float32().readArray("ds");
+        assertTrue(Arrays.equals(floatRead, floatWritten));
+        reader.close();
+    }
+
+    @Test
+    public void testMaxPathLength()
+    {
+        final File datasetFile = new File(workingDirectory, "maxpathlength.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
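+        // 16384 characters is the longest path accepted; testExceedMaxPathLength() checks
+        // that one character more is rejected.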
+        final String madnessOverwhelmesUs1 = StringUtils.repeat("a", 16384);
+        final String madnessOverwhelmesUs2 = StringUtils.repeat("/b", 8192);
+        writer.int32().write(madnessOverwhelmesUs1, 17);
+        writer.float32().write(madnessOverwhelmesUs2, 0.0f);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(17, reader.int32().read(madnessOverwhelmesUs1));
+        assertEquals(0.0f, reader.float32().read(madnessOverwhelmesUs2));
+        reader.close();
+    }
+
+    @Test
+    public void testExceedMaxPathLength()
+    {
+        final File datasetFile = new File(workingDirectory, "exceedmaxpathlength.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String madnessOverwhelmesUs = StringUtils.repeat("a", 16385);
+        try
+        {
+            writer.int32().write(madnessOverwhelmesUs, 17);
+            fail("path overflow not detected");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals(0, ex.getMessage().indexOf("Path too long"));
+        } finally
+        {
+            writer.close();
+        }
+    }
+
+    @Test
+    public void testAccessClosedReaderWriter()
+    {
+        final File datasetFile = new File(workingDirectory, "datasetsNonExtendable.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.close();
+        try
+        {
+            writer.writeBoolean("dataSet", true);
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals(String.format("HDF5 file '%s' is closed.", datasetFile.getAbsolutePath()),
+                    ex.getMessage());
+        }
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        reader.close();
+        try
+        {
+            reader.readBoolean("dataSet");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals(String.format("HDF5 file '%s' is closed.", datasetFile.getAbsolutePath()),
+                    ex.getMessage());
+        }
+    }
+
+    @Test
+    public void testDataSetsNonExtendable()
+    {
+        final File datasetFile = new File(workingDirectory, "datasetsNonExtendable.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(datasetFile).dontUseExtendableDataTypes()
+                        .syncMode(SyncMode.SYNC_BLOCK).writer();
+        final String floatDatasetName = "/Group1/floats";
+        final float[] floatDataWritten = new float[]
+            { 2.8f, 8.2f, -3.1f, 0.0f, 10000.0f };
+        writer.float32().writeArray(floatDatasetName, floatDataWritten);
+        final String compressedFloatDatasetName = "/Group1/floatsCompressed";
+        writer.float32().writeArray(compressedFloatDatasetName, floatDataWritten, FLOAT_DEFLATE);
+        final long[] longDataWritten = new long[]
+            { 10, -1000000, 1, 0, 100000000000L };
+        final String longDatasetName = "/Group2/longs";
+        writer.int64().writeArray(longDatasetName, longDataWritten);
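+        // 128 longs exceed the writer's compact-storage threshold, so this data set is
+        // stored CONTIGUOUS (or CHUNKED when compressed), as asserted below.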
+        final long[] longDataWrittenAboveCompactThreshold = new long[128];
+        for (int i = 0; i < longDataWrittenAboveCompactThreshold.length; ++i)
+        {
+            longDataWrittenAboveCompactThreshold[i] = i;
+        }
+        final String longDatasetNameAboveCompactThreshold = "/Group2/longsContiguous";
+        writer.int64().writeArray(longDatasetNameAboveCompactThreshold,
+                longDataWrittenAboveCompactThreshold);
+        final String longDatasetNameAboveCompactThresholdCompress = "/Group2/longsChunked";
+        writer.int64().writeArray(longDatasetNameAboveCompactThresholdCompress,
+                longDataWrittenAboveCompactThreshold, INT_DEFLATE);
+        final byte[] byteDataWritten = new byte[]
+            { 0, -1, 1, -128, 127 };
+        final String byteDatasetName = "/Group2/bytes";
+        writer.int8().writeArray(byteDatasetName, byteDataWritten, INT_DEFLATE);
+        final String stringDataWritten = "Some Random String";
+        final String stringDatasetName = "/Group3/strings";
+        final String stringDatasetName2 = "/Group4/strings";
+        writer.string().write(stringDatasetName, stringDataWritten);
+        writer.string().writeVL(stringDatasetName2, stringDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[] floatDataRead = reader.float32().readArray(floatDatasetName);
+        HDF5DataSetInformation info = reader.getDataSetInformation(floatDatasetName);
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        assertNull(info.tryGetChunkSizes());
+        assertTrue(info.isSigned());
+        assertTrue(Arrays.equals(floatDataWritten, floatDataRead));
+        final long[] compressedLongDataRead =
+                reader.int64().readArray(longDatasetNameAboveCompactThresholdCompress);
+        info = reader.getDataSetInformation(longDatasetNameAboveCompactThresholdCompress);
+        assertChunkSizes(info, longDataWrittenAboveCompactThreshold.length);
+        assertTrue(Arrays.equals(longDataWrittenAboveCompactThreshold, compressedLongDataRead));
+        final long[] longDataRead = reader.int64().readArray(longDatasetName);
+        info = reader.getDataSetInformation(longDatasetName);
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        assertNull(info.tryGetChunkSizes());
+        assertTrue(Arrays.equals(longDataWritten, longDataRead));
+        final long[] longDataReadAboveCompactThreshold =
+                reader.int64().readArray(longDatasetNameAboveCompactThreshold);
+        info = reader.getDataSetInformation(longDatasetNameAboveCompactThreshold);
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, info.getStorageLayout());
+        assertNull(info.tryGetChunkSizes());
+        assertTrue(Arrays.equals(longDataWrittenAboveCompactThreshold,
+                longDataReadAboveCompactThreshold));
+        final byte[] byteDataRead = reader.int8().readArray(byteDatasetName);
+        assertTrue(Arrays.equals(byteDataWritten, byteDataRead));
+        final String stringDataRead = reader.readString(stringDatasetName);
+        assertEquals(stringDataWritten, stringDataRead);
+        reader.close();
+    }
+
+    @Test
+    public void testOverwriteContiguousDataSet()
+    {
+        // Test for a bug in HDF5 1.8.1 and 1.8.2 when overwriting contiguous data sets and
+        // thereby changing their size.
+        // We have some workaround code in IHDF5Writer.getDataSetId(), which is why this test
+        // runs green. As new versions of HDF5 become available, one can comment out the
+        // workaround code and check whether this test turns red.
+        final File datasetFile = new File(workingDirectory, "overwriteContiguousDataSet.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final String dsName = "longArray";
+        IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(datasetFile).dontUseExtendableDataTypes()
+                        .writer();
+        // Creating the group is part of the "bug magic".
+        writer.object().createGroup("group");
+        final long[] arrayWritten1 = new long[1000];
+        for (int i = 0; i < arrayWritten1.length; ++i)
+        {
+            arrayWritten1[i] = i;
+        }
+        writer.int64().writeArray(dsName, arrayWritten1);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        final long[] arrayWritten2 = new long[5];
+        for (int i = 0; i < arrayWritten2.length; ++i)
+        {
+            arrayWritten2[i] = i * i;
+        }
+        writer.int64().writeArray(dsName, arrayWritten2);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final long[] arrayRead = reader.int64().readArray(dsName);
+        assertTrue(Arrays.equals(arrayWritten2, arrayRead));
+        reader.close();
+    }
+
+    @Test
+    public void testCompactDataset()
+    {
+        final File datasetFile = new File(workingDirectory, "compactDS.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final String dsName = "ds";
+        long[] data = new long[]
+            { 1, 2, 3 };
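+        // INT_COMPACT requests the COMPACT storage layout, i.e. the values are stored
+        // directly in the object header of the data set rather than in chunks.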
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.int64().writeArray(dsName, data, HDF5IntStorageFeatures.INT_COMPACT);
+        assertEquals(HDF5StorageLayout.COMPACT, writer.getDataSetInformation(dsName)
+                .getStorageLayout());
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(Arrays.equals(data, reader.int64().readArray(dsName)));
+        reader.close();
+    }
+
+    @Test
+    public void testCreateEmptyFixedSizeDataSets()
+    {
+        final File datasetFile = new File(workingDirectory, "createEmptyFixedSizeDataSets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5Factory.open(datasetFile);
+        writer.int64().createArray("longArr", 5, HDF5IntStorageFeatures.INT_COMPACT);
+        writer.int64().createMDArray("longMDArr", new int[]
+            { 5, 5 }, HDF5IntStorageFeatures.INT_COMPACT);
+        writer.bool()
+                .createBitFieldArray("bitfieldArr", 128, 5, HDF5IntStorageFeatures.INT_COMPACT);
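+        // Bit fields are stored as 64-bit words, so the 128-bit field needs two words
+        // and the data set is expected to have dimensions { 2, 5 } below.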
+        writer.enumeration().createArray("enumArr", writer.enumeration().getAnonType(new String[]
+            { "a", "b", "c" }), 5, HDF5IntStorageFeatures.INT_COMPACT);
+        writer.enumeration().createMDArray("enumMDArr",
+                writer.enumeration().getAnonType(new String[]
+                    { "a", "b", "c" }), new int[]
+                    { 5, 5 }, HDF5IntStorageFeatures.INT_COMPACT);
+        writer.close();
+        IHDF5Reader reader = HDF5Factory.openForReading(datasetFile);
+        HDF5DataSetInformation info = reader.getDataSetInformation("longArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5 }, info.getDimensions()));
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("longMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5, 5 }, info.getDimensions()));
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("enumArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5 }, info.getDimensions()));
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("enumMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5, 5 }, info.getDimensions()));
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("bitfieldArr");
+        assertTrue(Arrays.equals(new long[]
+            { 2, 5 }, info.getDimensions()));
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testCreateEmptyGrowableDataSets()
+    {
+        final File datasetFile = new File(workingDirectory, "createEmptyGrowableDataSets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5Factory.open(datasetFile);
+        writer.int64().createArray("longArr", 5);
+        writer.int64().createMDArray("longMDArr", new int[]
+            { 5, 5 });
+        writer.bool().createBitFieldArray("bitfieldArr", 128, 5);
+        writer.enumeration().createArray("enumArr", writer.enumeration().getAnonType(new String[]
+            { "a", "b", "c" }), 5);
+        writer.enumeration().createMDArray("enumMDArr",
+                writer.enumeration().getAnonType(new String[]
+                    { "a", "b", "c" }), new int[]
+                    { 5, 5 });
+        writer.close();
+        IHDF5Reader reader = HDF5Factory.openForReading(datasetFile);
+        HDF5DataSetInformation info = reader.object().getDataSetInformation("longArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("longMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5, 5 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5, 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("enumArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("enumMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5, 5 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5, 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("bitfieldArr");
+        assertTrue(Arrays.equals(new long[]
+            { 2, 5 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 2, 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testCreateZeroLengthGrowableDataSets()
+    {
+        final File datasetFile = new File(workingDirectory, "createZeroLengthGrowableDataSets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5Factory.open(datasetFile);
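+        // With INT_CHUNKED, the size arguments only determine the chunk sizes; all data
+        // sets are created with an initial extent of zero, as asserted below.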
+        writer.int64().createArray("longArr", 5, INT_CHUNKED);
+        writer.int64().createMDArray("longMDArr", new int[]
+            { 5, 5 }, INT_CHUNKED);
+        writer.bool().createBitFieldArray("bitfieldArr", 128, 5, INT_CHUNKED);
+        writer.enumeration().createArray("enumArr", writer.enumeration().getAnonType(new String[]
+            { "a", "b", "c" }), 5, INT_CHUNKED);
+        writer.enumeration().createMDArray("enumMDArr",
+                writer.enumeration().getAnonType(new String[]
+                    { "a", "b", "c" }), new int[]
+                    { 5, 5 }, INT_CHUNKED);
+        writer.close();
+        IHDF5Reader reader = HDF5Factory.openForReading(datasetFile);
+        HDF5DataSetInformation info = reader.object().getDataSetInformation("longArr");
+        assertTrue(Arrays.equals(new long[]
+            { 0 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("longMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 0, 0 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5, 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("enumArr");
+        assertTrue(Arrays.equals(new long[]
+            { 0 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("enumMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 0, 0 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 5, 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        info = reader.object().getDataSetInformation("bitfieldArr");
+        assertTrue(Arrays.equals(new long[]
+            { 2, 0 }, info.getDimensions()));
+        assertTrue(Arrays.equals(new int[]
+            { 2, 5 }, info.tryGetChunkSizes()));
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testCreateEmptyDefaultFixedSizeDataSets()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "createEmptyDefaultFixedSizeDataSets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer =
+                HDF5Factory.configure(datasetFile).dontUseExtendableDataTypes().writer();
+        writer.int64().createArray("longArr", 5);
+        writer.int64().createMDArray("longMDArr", new int[]
+            { 5, 5 });
+        writer.bool().createBitFieldArray("bitfieldArr", 128, 5);
+        writer.enumeration().createArray("enumArr", writer.enumeration().getAnonType(new String[]
+            { "a", "b", "c" }), 5);
+        writer.close();
+        IHDF5Reader reader = HDF5Factory.openForReading(datasetFile);
+        HDF5DataSetInformation info = reader.getDataSetInformation("longArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5 }, info.getDimensions()));
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("longMDArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5, 5 }, info.getDimensions()));
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("enumArr");
+        assertTrue(Arrays.equals(new long[]
+            { 5 }, info.getDimensions()));
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        info = reader.getDataSetInformation("bitfieldArr");
+        assertTrue(Arrays.equals(new long[]
+            { 2, 5 }, info.getDimensions()));
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testExtendChunkedDataset()
+    {
+        final File datasetFile = new File(workingDirectory, "extendChunked.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final String dsName = "ds";
+        long[] data = new long[]
+            { 1, 2, 3, 4 };
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.int64().createArray(dsName, 5, 3);
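+        // The _KEEP variant keeps the existing chunked data set instead of deleting and
+        // re-creating it; its extent is merely adjusted to the length of the array.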
+        writer.int64().writeArray(dsName, data, HDF5IntStorageFeatures.INT_NO_COMPRESSION_KEEP);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        long[] dataRead = reader.int64().readArray(dsName);
+        assertTrue(Arrays.equals(data, dataRead));
+        reader.close();
+        // Now write a larger data set and see whether the data set is correctly extended.
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        data = new long[]
+            { 17, 42, 1, 2, 3, 101, -5 };
+        writer.int64().writeArray(dsName, data);
+        writer.close();
+        reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        dataRead = reader.int64().readArray(dsName);
+        assertTrue(Arrays.equals(data, dataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testExtendContiguousDataset()
+    {
+        final File datasetFile = new File(workingDirectory, "extendContiguous.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final String dsName = "ds";
+        long[] longArrayWritten = new long[]
+            { 1, 2, 3 };
+        IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(datasetFile).dontUseExtendableDataTypes()
+                        .writer();
+        // Set maxdims such that COMPACT_LAYOUT_THRESHOLD (in bytes!) is exceeded so that
+        // we get a contiguous data set.
+        writer.int64().createArray(dsName, 128, 1);
+        writer.int64().writeArray(dsName, longArrayWritten);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final long[] longArrayRead = reader.int64().readArray(dsName);
+        assertTrue(Arrays.equals(longArrayWritten, longArrayRead));
+        reader.close();
+        // Now write a larger data set and see whether the data set is correctly extended.
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        longArrayWritten = new long[]
+            { 17, 42, 1, 2, 3 };
+        writer.int64().writeArray(dsName, longArrayWritten);
+        writer.close();
+        reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(Arrays.equals(longArrayWritten, reader.int64().readArray(dsName)));
+        reader.close();
+    }
+
+    @Test
+    public void testAutomaticDeletionOfDataSetOnWrite()
+    {
+        final File datasetFile = new File(workingDirectory, "automaticDeletionOfDataSetOnWrite.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = new HDF5WriterConfigurator(datasetFile).writer();
+        writer.float32().createArray("f", 12, HDF5FloatStorageFeatures.FLOAT_COMPACT);
+        writer.float32().writeArray("f", new float[]
+            { 1f, 2f, 3f, 4f, 5f });
+        writer.close();
+        final IHDF5Reader reader = new HDF5ReaderConfigurator(datasetFile).reader();
+        HDF5DataSetInformation info = reader.getDataSetInformation("f");
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        assertEquals(5, info.tryGetChunkSizes()[0]);
+        reader.close();
+    }
+
+    @Test
+    public void testAutomaticDeletionOfDataSetOnCreate()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "automaticDeletionOfDataSetOnCreate.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = new HDF5WriterConfigurator(datasetFile).writer();
+        writer.float32().createArray("f", 12, 6, HDF5FloatStorageFeatures.FLOAT_COMPACT);
+        writer.float32().createArray("f", 10, HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS);
+        // This won't overwrite the data set as it is a block write command.
+        writer.float32().writeArrayBlock("f", new float[]
+            { 1f, 2f, 3f, 4f, 5f }, 0);
+        writer.close();
+        final IHDF5Reader reader = new HDF5ReaderConfigurator(datasetFile).reader();
+        HDF5DataSetInformation info = reader.getDataSetInformation("f");
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, info.getStorageLayout());
+        assertEquals(10, info.getDimensions()[0]);
+        assertNull(info.tryGetChunkSizes());
+        reader.close();
+    }
+
+    @Test
+    public void testSpacesInDataSetName()
+    {
+        final File datasetFile = new File(workingDirectory, "datasetsWithSpaces.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "Float Dataset";
+        final float[] floatDataWritten = new float[]
+            { 2.8f, 8.2f, -3.1f, 0.0f, 10000.0f };
+        writer.float32().writeArray(floatDatasetName, floatDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[] floatDataRead = reader.float32().readArray(floatDatasetName);
+        assertTrue(Arrays.equals(floatDataWritten, floatDataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testFloatArrayTypeDataSet()
+    {
+        final File datasetFile = new File(workingDirectory, "floatArrayTypeDataSet.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
+        final float[] floatDataWritten = new float[]
+            { 2.8f, 8.2f, -3.1f, 0.0f, 10000.0f };
+        efWriter.writeFloatArrayArrayType("f", floatDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals("FLOAT(4, #5):{}", reader.getDataSetInformation("f").toString());
+        final float[] floatDataRead = reader.float32().readArray("f");
+        assertTrue(Arrays.equals(floatDataWritten, floatDataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testDoubleArrayAsByteArray()
+    {
+        final File datasetFile = new File(workingDirectory, "doubleArrayTypeDataSetAsByteArray.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
+        writer.float64().createArray("f", 6, 3);
+        final double[] floatDataWritten = new double[]
+            { 2.8, 8.2, -3.1, 0.0, 10000.0 };
+        efWriter.writeDoubleArrayBigEndian("f", floatDataWritten,
+                HDF5FloatStorageFeatures.FLOAT_NO_COMPRESSION_KEEP);
+        final double[] floatDataWritten2 = new double[]
+            { 2.8, 8.2, -3.1, 0.0 };
+        writer.float64().writeMDArray("f2", new MDDoubleArray(floatDataWritten2, new int[]
+            { 2, 2 }));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals("FLOAT(8):{5}", reader.getDataSetInformation("f").toString());
+        final byte[] byteDataRead = reader.readAsByteArray("f");
+        final double[] floatDataRead = NativeData.byteToDouble(byteDataRead, ByteOrder.NATIVE);
+        assertTrue(Arrays.equals(floatDataWritten, floatDataRead));
+        final byte[] byteDataRead2 = reader.readAsByteArray("f2");
+        final double[] floatDataRead2 = NativeData.byteToDouble(byteDataRead2, ByteOrder.NATIVE);
+        assertTrue(Arrays.equals(floatDataWritten2, floatDataRead2));
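+        // readArrayBlock() addresses by block index (block 1 of size 2 yields elements
+        // 2 and 3), while readArrayBlockWithOffset() addresses by element offset
+        // (offset 1 yields elements 1 and 2); two doubles make 16 bytes either way.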
+        byte[] byteDataBlockRead = reader.opaque().readArrayBlock("f", 2, 1);
+        assertEquals(16, byteDataBlockRead.length);
+        assertEquals(floatDataWritten[2],
+                NativeData.byteToDouble(byteDataBlockRead, ByteOrder.NATIVE, 0, 1)[0]);
+        assertEquals(floatDataWritten[3],
+                NativeData.byteToDouble(byteDataBlockRead, ByteOrder.NATIVE, 8, 1)[0]);
+
+        byteDataBlockRead = reader.opaque().readArrayBlockWithOffset("f", 2, 1);
+        assertEquals(16, byteDataBlockRead.length);
+        assertEquals(floatDataWritten[1],
+                NativeData.byteToDouble(byteDataBlockRead, ByteOrder.NATIVE, 0, 1)[0]);
+        assertEquals(floatDataWritten[2],
+                NativeData.byteToDouble(byteDataBlockRead, ByteOrder.NATIVE, 8, 1)[0]);
+        final double[][] values =
+            {
+                { 2.8, 8.2, -3.1 },
+                { 0.0, 10000.0 } };
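+        // The natural block size equals the chunk size of 3 set at creation time, so
+        // the five values come back as one full block of 3 and a final block of 2.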
+        int i = 0;
+        for (HDF5DataBlock<byte[]> block : reader.opaque().getArrayNaturalBlocks("f"))
+        {
+            assertEquals(i, block.getIndex());
+            assertEquals(i * 3, block.getOffset());
+            assertTrue(Arrays.equals(values[i],
+                    NativeData.byteToDouble(block.getData(), ByteOrder.NATIVE)));
+            ++i;
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testFloatArrayTypeDataSetOverwrite()
+    {
+        final File datasetFile = new File(workingDirectory, "floatArrayTypeDataSetOverwrite.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
+        final float[] floatDataWritten = new float[]
+            { 2.8f, 8.2f, -3.1f, 0.0f, 10000.0f };
+        efWriter.writeFloatArrayArrayType("f", floatDataWritten);
+        final float[] floatDataWritten2 = new float[]
+            { 0.1f, 8.2f, -3.1f, 0.0f, 20000.0f };
+        writer.float32().writeArray("f", floatDataWritten2);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals("FLOAT(4):{5}", reader.getDataSetInformation("f").toString());
+        final float[] floatDataRead = reader.float32().readArray("f");
+        assertTrue(Arrays.equals(floatDataWritten2, floatDataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testFloatMDArrayTypeDataSet()
+    {
+        final File datasetFile = new File(workingDirectory, "floatMDArrayTypeDataSet.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
+        final MDFloatArray floatDataWritten = new MDFloatArray(new float[]
+            { 2.8f, 8.2f, -3.1f, -0.1f, 10000.0f, 1.111f }, new int[]
+            { 3, 2 });
+        efWriter.writeFloatArrayArrayType("fa", floatDataWritten);
+        efWriter.writeFloat2DArrayArrayType1DSpace1d("fas", floatDataWritten);
+        writer.float32().writeMDArray("f", floatDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals("FLOAT(4, [3,2]):{}", reader.getDataSetInformation("fa").toString());
+        final MDFloatArray floatDataReadFa = reader.float32().readMDArray("fa");
+        assertEquals(floatDataWritten, floatDataReadFa);
+        final MDFloatArray floatDataReadFas = reader.float32().readMDArray("fas");
+        assertEquals(floatDataWritten, floatDataReadFas);
+        final MDFloatArray floatBlock = new MDFloatArray(new float[]
+            { -3.1f, -0.1f }, new int[]
+            { 1, 2 });
+        assertEquals(floatBlock, reader.float32().readMDArrayBlock("f", new int[]
+            { 1, -1 }, new long[]
+            { 1, 0 }));
+        assertEquals(floatBlock, reader.float32().readMDArrayBlock("fas", new int[]
+            { 1, -1 }, new long[]
+            { 1, 0 }));
+        try
+        {
+            reader.float32().readMDArrayBlock("fa", new int[]
+                { 1, -1 }, new long[]
+                { 1, 0 });
+            fail("Illegal block-wise reading of array-type not detected.");
+        } catch (HDF5JavaException ex)
+        {
+            // Expected
+        }
+        assertEquals(2, reader.object().getRank("f"));
+        assertTrue(Arrays.equals(new long[]
+            { 3, 2 }, reader.object().getDimensions("f")));
+        assertEquals(2, reader.object().getRank("fa"));
+        assertTrue(Arrays.equals(new long[]
+            { 3, 2 }, reader.object().getDimensions("fa")));
+        assertEquals(2, reader.object().getRank("fas"));
+        assertTrue(Arrays.equals(new long[]
+            { 3, 2 }, reader.object().getDimensions("fas")));
+        reader.close();
+    }
+
+    @Test
+    public void testFloatArrayCreateCompactOverwriteBlock()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "testFloatArrayCreateCompactOverwroteBlock.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.float32().writeArray("f", new float[]
+            { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, HDF5FloatStorageFeatures.FLOAT_COMPACT);
+        writer.float32().writeArrayBlockWithOffset("f", new float[]
+            { 400, 500, 600 }, 3, 3);
+        float[] arrayWritten = new float[]
+            { 1, 2, 3, 400, 500, 600, 7, 8, 9, 10 };
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(Arrays.equals(arrayWritten, reader.float32().readArray("f")));
+        reader.close();
+    }
+
+    @Test
+    public void testReadFloatMatrixDataSetBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "readFloatMatrixBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final float[][] floatMatrix = new float[10][10];
+        for (int i = 0; i < floatMatrix.length; ++i)
+        {
+            for (int j = 0; j < floatMatrix[i].length; ++j)
+            {
+                floatMatrix[i][j] = i * j;
+            }
+        }
+        writer.float32().writeMatrix(dsName, floatMatrix);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final int blockSize = 5;
+        for (int i = 0; i < 2; ++i)
+        {
+            for (int j = 0; j < 2; ++j)
+            {
+                final float[][] floatMatrixBlockRead =
+                        reader.float32().readMatrixBlock(dsName, blockSize, blockSize, i, j);
+                assertEquals(blockSize, floatMatrixBlockRead.length);
+                assertEquals(blockSize, floatMatrixBlockRead[0].length);
+                final float[][] floatMatrixBlockExpected = new float[blockSize][];
+                for (int k = 0; k < blockSize; ++k)
+                {
+                    final float[] rowExpected = new float[blockSize];
+                    System.arraycopy(floatMatrix[i * blockSize + k], blockSize * j, rowExpected, 0,
+                            blockSize);
+                    floatMatrixBlockExpected[k] = rowExpected;
+                }
+                assertMatrixEquals(floatMatrixBlockExpected, floatMatrixBlockRead);
+            }
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testMDIntArrayDifferentSizesElementType()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "testMDIntArrayDifferentSizesElementType.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final MDIntArray arr = new MDIntArray(new int[]
+            { 2, 2 });
+        arr.set(1, 0, 0);
+        arr.set(2, 0, 1);
+        arr.set(3, 1, 0);
+        arr.set(4, 1, 1);
+        arr.incNumberOfHyperRows(1);
+        arr.set(5, 2, 0);
+        arr.set(6, 2, 1);
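+        // The data set is created with a 16-bit element type but written and read
+        // through the int32 interface, exercising on-the-fly element type conversion.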
+        writer.int16().createMDArray("array", new int[]
+            { 3, 2 });
+        writer.int32().writeMDArrayBlock("array", arr, new long[]
+            { 0, 0 });
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(arr, reader.int32().readMDArray("array"));
+        reader.close();
+    }
+
+    @Test
+    public void testMDIntArrayDifferentSizesElementTypeUnsignedByte()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "testMDIntArrayDifferentSizesElementTypeUnsignedByte.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final MDIntArray arr = new MDIntArray(new int[]
+            { 2, 2 });
+        arr.set(1, 0, 0);
+        arr.set(2, 0, 1);
+        arr.set(3, 1, 0);
+        arr.set(4, 1, 1);
+        arr.incNumberOfHyperRows(1);
+        arr.set(5, 2, 0);
+        arr.set(255, 2, 1);
+        writer.uint8().createMDArray("array", new int[]
+            { 3, 2 });
+        writer.int32().writeMDArrayBlock("array", arr, new long[]
+            { 0, 0 });
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(arr, reader.int32().readMDArray("array"));
+        reader.close();
+    }
+
+    @Test
+    public void testSetExtentBug()
+    {
+        final File datasetFile = new File(workingDirectory, "setExtentBug.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final float[][] floatMatrixBlockWritten = new float[][]
+            {
+                { 1, 2 },
+                { 3, 4 } };
+        final int blockSize = 2;
+        writer.float32().createMatrix(dsName, 0, 0, blockSize, blockSize);
+        writer.float32().writeMatrixBlock(dsName, floatMatrixBlockWritten, 0, 0);
+        writer.float32().writeMatrixBlock(dsName, floatMatrixBlockWritten, 0, 1);
+        // The next line will make the block (0,1) disappear if the bug is present.
+        writer.float32().writeMatrixBlock(dsName, floatMatrixBlockWritten, 1, 0);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[][] floatMatrixBlockRead =
+                reader.float32().readMatrixBlock(dsName, blockSize, blockSize, 0, 1);
+        assertMatrixEquals(floatMatrixBlockWritten, floatMatrixBlockRead);
+        reader.close();
+    }
+
+    @Test
+    public void testWriteFloatMatrixDataSetBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "writeFloatMatrixBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final float[][] floatMatrixBlockWritten = new float[5][5];
+        for (int i = 0; i < floatMatrixBlockWritten.length; ++i)
+        {
+            for (int j = 0; j < floatMatrixBlockWritten[i].length; ++j)
+            {
+                floatMatrixBlockWritten[i][j] = i * j;
+            }
+        }
+        final int blockSize = 5;
+        writer.float32().createMatrix(dsName, 2 * blockSize, 2 * blockSize, blockSize, blockSize);
+        for (int i = 0; i < 2; ++i)
+        {
+            for (int j = 0; j < 2; ++j)
+            {
+                writer.float32().writeMatrixBlock(dsName, floatMatrixBlockWritten, i, j);
+            }
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        for (int i = 0; i < 2; ++i)
+        {
+            for (int j = 0; j < 2; ++j)
+            {
+                final float[][] floatMatrixBlockRead =
+                        reader.float32().readMatrixBlock(dsName, blockSize, blockSize, i, j);
+                assertMatrixEquals(floatMatrixBlockWritten, floatMatrixBlockRead);
+            }
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testWriteFloatMatrixDataSetBlockWiseWithOffset()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "writeFloatMatrixBlockWiseWithOffset.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final float[][] floatMatrixBlockWritten = new float[5][5];
+        int count = 0;
+        for (int i = 0; i < floatMatrixBlockWritten.length; ++i)
+        {
+            for (int j = 0; j < floatMatrixBlockWritten[i].length; ++j)
+            {
+                floatMatrixBlockWritten[i][j] = ++count;
+            }
+        }
+        final int blockSize = 5;
+        final int offsetX = 2;
+        final int offsetY = 3;
+        writer.float32().createMatrix(dsName, 2 * blockSize, 2 * blockSize, blockSize, blockSize);
+        writer.float32().writeMatrixBlockWithOffset(dsName, floatMatrixBlockWritten, 5, 5, offsetX,
+                offsetY);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[][] floatMatrixBlockRead =
+                reader.float32().readMatrixBlockWithOffset(dsName, blockSize, blockSize, offsetX,
+                        offsetY);
+        assertMatrixEquals(floatMatrixBlockWritten, floatMatrixBlockRead);
+        final float[][] floatMatrixRead = reader.float32().readMatrix(dsName);
+        // Subtract the non-zero block.
+        for (int i = 0; i < floatMatrixBlockWritten.length; ++i)
+        {
+            for (int j = 0; j < floatMatrixBlockWritten[i].length; ++j)
+            {
+                floatMatrixRead[offsetX + i][offsetY + j] -= floatMatrixBlockWritten[i][j];
+            }
+        }
+        for (int i = 0; i < floatMatrixRead.length; ++i)
+        {
+            for (int j = 0; j < floatMatrixRead[i].length; ++j)
+            {
+                assertEquals(i + ":" + j, 0.0f, floatMatrixRead[i][j]);
+            }
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testReadMDFloatArrayAsByteArray()
+    {
+        final File datasetFile = new File(workingDirectory, "readMDFloatArrayAsByteArray.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.float32().writeMatrix("fm", new float[][]
+            {
+                { 1f, 2f },
+                { 3f, 4f } });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] arr = reader.readAsByteArray("fm");
+        assertEquals(1f, NativeData.byteToFloat(arr, ByteOrder.NATIVE, 0, 1)[0]);
+        assertEquals(2f, NativeData.byteToFloat(arr, ByteOrder.NATIVE, 4, 1)[0]);
+        assertEquals(3f, NativeData.byteToFloat(arr, ByteOrder.NATIVE, 8, 1)[0]);
+        assertEquals(4f, NativeData.byteToFloat(arr, ByteOrder.NATIVE, 12, 1)[0]);
+        try
+        {
+            reader.opaque().readArrayBlock("fm", 2, 0);
+            fail("readAsByteArrayBlock() is expected to fail on datasets of rank > 1");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Data Set is expected to be of rank 1 (rank=2)", ex.getMessage());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testReadByteArrayDataSetBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "readByteArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final byte[] byteArray = new byte[100];
+        for (int i = 0; i < byteArray.length; ++i)
+        {
+            byteArray[i] = (byte) (100 + i);
+        }
+        writer.int8().writeArray(dsName, byteArray);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final int blockSize = 10;
+        for (int i = 0; i < 10; ++i)
+        {
+            final byte[] byteArrayBlockRead = reader.int8().readArrayBlock(dsName, blockSize, i);
+            assertEquals(blockSize, byteArrayBlockRead.length);
+            final byte[] byteArrayBlockExpected = new byte[blockSize];
+            System.arraycopy(byteArray, blockSize * i, byteArrayBlockExpected, 0, blockSize);
+            assertTrue("Block " + i, Arrays.equals(byteArrayBlockExpected, byteArrayBlockRead));
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testWriteByteArrayDataSetBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "writeByteArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int size = 100;
+        final int blockSize = 10;
+        final int numberOfBlocks = 10;
+        writer.int8().createArray(dsName, size, blockSize, INT_DEFLATE);
+        final byte[] block = new byte[blockSize];
+        for (int i = 0; i < numberOfBlocks; ++i)
+        {
+            Arrays.fill(block, (byte) i);
+            writer.int8().writeArrayBlock(dsName, block, i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] byteArrayRead = reader.readAsByteArray(dsName);
+        reader.close();
+        assertEquals(size, byteArrayRead.length);
+        for (int i = 0; i < byteArrayRead.length; ++i)
+        {
+            assertEquals("Byte " + i, (i / blockSize), byteArrayRead[i]);
+        }
+    }
+
+    @Test
+    public void testCreateByteArrayDataSetBlockSize0()
+    {
+        final File datasetFile = new File(workingDirectory, "testCreateByteArrayDataSetBlockSize0");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int size = 100;
+        final int blockSize = 10;
+        final int numberOfBlocks = 10;
+        final int nominalBlockSize = 0;
+        writer.int8().createArray(dsName, size, nominalBlockSize, INT_DEFLATE);
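+        // Creating the array with a nominal block size of 0 must not prevent the
+        // block-wise writes of size 10 below from succeeding.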
+        final byte[] block = new byte[blockSize];
+        for (int i = 0; i < numberOfBlocks; ++i)
+        {
+            Arrays.fill(block, (byte) i);
+            writer.int8().writeArrayBlock(dsName, block, i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] byteArrayRead = reader.readAsByteArray(dsName);
+        reader.close();
+        assertEquals(size, byteArrayRead.length);
+        for (int i = 0; i < byteArrayRead.length; ++i)
+        {
+            assertEquals("Byte " + i, (i / blockSize), byteArrayRead[i]);
+        }
+    }
+
+    @Test
+    public void testCreateFloatArrayWithDifferentStorageLayouts()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "testCreateFloatArrayWithDifferentStorageLayouts");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName1 = "ds1";
+        final String dsName2 = "ds2";
+        final int size = 100;
+        writer.float32().createArray(dsName1, size, HDF5FloatStorageFeatures.FLOAT_CONTIGUOUS);
+        writer.float32().createArray(dsName2, size, HDF5FloatStorageFeatures.FLOAT_CHUNKED);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info1 = reader.getDataSetInformation(dsName1);
+        final HDF5DataSetInformation info2 = reader.getDataSetInformation(dsName2);
+        reader.close();
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, info1.getStorageLayout());
+        assertEquals(size, info1.getDimensions()[0]);
+        assertNull(info1.tryGetChunkSizes());
+        assertEquals(HDF5StorageLayout.CHUNKED, info2.getStorageLayout());
+        assertEquals(0, info2.getDimensions()[0]);
+        assertEquals(size, info2.tryGetChunkSizes()[0]);
+    }
+
+    @Test
+    public void testWriteByteArrayDataSetBlockWiseExtend()
+    {
+        final File datasetFile = new File(workingDirectory, "writeByteArrayBlockWiseExtend.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int size = 100;
+        final int blockSize = 10;
+        final int numberOfBlocks = 10;
+        writer.int8().createArray(dsName, 0, blockSize, INT_DEFLATE);
+        final byte[] block = new byte[blockSize];
+        for (int i = 0; i < numberOfBlocks; ++i)
+        {
+            Arrays.fill(block, (byte) i);
+            writer.int8().writeArrayBlock(dsName, block, i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] byteArrayRead = reader.readAsByteArray(dsName);
+        reader.close();
+        assertEquals(size, byteArrayRead.length);
+        for (int i = 0; i < byteArrayRead.length; ++i)
+        {
+            assertEquals("Byte " + i, (i / blockSize), byteArrayRead[i]);
+        }
+    }
+
+    @Test
+    public void testWriteByteArrayDataSetBlockWiseMismatch()
+    {
+        final File datasetFile = new File(workingDirectory, "writeByteArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int size = 99;
+        final int blockSize = 10;
+        final int numberOfBlocks = 10;
+        writer.int8().createArray(dsName, size, blockSize, INT_DEFLATE);
+        final byte[] block = new byte[blockSize];
+        for (int i = 0; i < numberOfBlocks; ++i)
+        {
+            Arrays.fill(block, (byte) i);
+            if (blockSize * (i + 1) > size)
+            {
+                final int ofs = blockSize * i;
+                writer.int8().writeArrayBlockWithOffset(dsName, block, size - ofs, ofs);
+            } else
+            {
+                writer.int8().writeArrayBlock(dsName, block, i);
+            }
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] byteArrayRead = reader.int8().readArray(dsName);
+        reader.close();
+        assertEquals(size, byteArrayRead.length);
+        for (int i = 0; i < byteArrayRead.length; ++i)
+        {
+            assertEquals("Byte " + i, (i / blockSize), byteArrayRead[i]);
+        }
+    }
+
+    @Test
+    public void testWriteOpaqueByteArrayDataSetBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "writeOpaqueByteArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int size = 100;
+        final int blockSize = 10;
+        final int numberOfBlocks = 10;
+        final HDF5OpaqueType opaqueDataType =
+                writer.opaque()
+                        .createArray(dsName, "TAG", size / 2, blockSize, GENERIC_DEFLATE_MAX);
+        final byte[] block = new byte[blockSize];
+        for (int i = 0; i < numberOfBlocks; ++i)
+        {
+            Arrays.fill(block, (byte) i);
+            writer.opaque().writeArrayBlock(dsName, opaqueDataType, block, i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] byteArrayRead = reader.readAsByteArray(dsName);
+        reader.close();
+        assertEquals(size, byteArrayRead.length);
+        for (int i = 0; i < byteArrayRead.length; ++i)
+        {
+            assertEquals("Byte " + i, (i / blockSize), byteArrayRead[i]);
+        }
+    }
+
+    @Test
+    public void testWriteOpaqueByteArrayDataSetBlockWiseMismatch()
+    {
+        final File datasetFile = new File(workingDirectory, "writeOpaqueByteArrayBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int size = 99;
+        final int blockSize = 10;
+        final int numberOfBlocks = 10;
+        final HDF5OpaqueType opaqueDataType =
+                writer.opaque().createArray(dsName, "TAG", size, blockSize, GENERIC_DEFLATE);
+        final byte[] block = new byte[blockSize];
+        for (int i = 0; i < numberOfBlocks; ++i)
+        {
+            Arrays.fill(block, (byte) i);
+            if (blockSize * (i + 1) > size)
+            {
+                final int ofs = blockSize * i;
+                writer.opaque().writeArrayBlockWithOffset(dsName, opaqueDataType, block,
+                        size - ofs, ofs);
+            } else
+            {
+                writer.opaque().writeArrayBlock(dsName, opaqueDataType, block, i);
+            }
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] byteArrayRead = reader.readAsByteArray(dsName);
+        reader.close();
+        assertEquals(size, byteArrayRead.length);
+        for (int i = 0; i < byteArrayRead.length; ++i)
+        {
+            assertEquals("Byte " + i, (i / blockSize), byteArrayRead[i]);
+        }
+    }
+
+    @Test
+    public void testWriteByteMatrixDataSetBlockWise()
+    {
+        final File datasetFile = new File(workingDirectory, "writeByteMatrixBlockWise.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int sizeX = 100;
+        final int sizeY = 10;
+        final int blockSizeX = 10;
+        final int blockSizeY = 5;
+        final int numberOfBlocksX = 10;
+        final int numberOfBlocksY = 2;
+        writer.int8().createMatrix(dsName, sizeX, sizeY, blockSizeX, blockSizeY, INT_DEFLATE);
+        final byte[][] block = new byte[blockSizeX][blockSizeY];
+        for (int i = 0; i < numberOfBlocksX; ++i)
+        {
+            for (int j = 0; j < numberOfBlocksY; ++j)
+            {
+                for (int k = 0; k < blockSizeX; ++k)
+                {
+                    Arrays.fill(block[k], (byte) (i + j));
+                }
+                writer.int8().writeMatrixBlock(dsName, block, i, j);
+            }
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[][] byteMatrixRead = reader.int8().readMatrix(dsName);
+        reader.close();
+        assertEquals(sizeX, byteMatrixRead.length);
+        for (int i = 0; i < byteMatrixRead.length; ++i)
+        {
+            for (int j = 0; j < byteMatrixRead[i].length; ++j)
+            {
+                assertEquals("Byte (" + i + "," + j + ")", (i / blockSizeX + j / blockSizeY),
+                        byteMatrixRead[i][j]);
+            }
+        }
+    }
+
+    @Test
+    public void testWriteByteMatrixDataSetBlockWiseMismatch()
+    {
+        final File datasetFile = new File(workingDirectory, "writeByteMatrixBlockWiseMismatch.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final int sizeX = 99;
+        final int sizeY = 12;
+        final int blockSizeX = 10;
+        final int blockSizeY = 5;
+        final int numberOfBlocksX = 10;
+        final int numberOfBlocksY = 3;
+        writer.int8().createMatrix(dsName, sizeX, sizeY, blockSizeX, blockSizeY, INT_DEFLATE);
+        final byte[][] block = new byte[blockSizeX][blockSizeY];
+        for (int i = 0; i < numberOfBlocksX; ++i)
+        {
+            for (int j = 0; j < numberOfBlocksY; ++j)
+            {
+                for (int k = 0; k < blockSizeX; ++k)
+                {
+                    Arrays.fill(block[k], (byte) (i + j));
+                }
+                writer.int8().writeMatrixBlockWithOffset(dsName, block,
+                        Math.min(blockSizeX, sizeX - i * blockSizeX),
+                        Math.min(blockSizeY, sizeY - j * blockSizeY), i * blockSizeX,
+                        j * blockSizeY);
+            }
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[][] byteMatrixRead = reader.int8().readMatrix(dsName);
+        reader.close();
+        assertEquals(sizeX, byteMatrixRead.length);
+        for (int i = 0; i < byteMatrixRead.length; ++i)
+        {
+            for (int j = 0; j < byteMatrixRead[i].length; ++j)
+            {
+                assertEquals("Byte (" + i + "," + j + ")", (i / blockSizeX + j / blockSizeY),
+                        byteMatrixRead[i][j]);
+            }
+        }
+    }
+
+    @Test
+    public void testReadToFloatMDArray()
+    {
+        final File datasetFile = new File(workingDirectory, "readToFloatMDArray.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final MDFloatArray arrayWritten = new MDFloatArray(new float[]
+            { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, new int[]
+            { 3, 3 });
+        writer.float32().writeMDArray(dsName, arrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final MDFloatArray arrayRead = new MDFloatArray(new int[]
+            { 10, 10 });
+        final int memOfsX = 2;
+        final int memOfsY = 3;
+        reader.float32().readToMDArrayWithOffset(dsName, arrayRead, new int[]
+            { memOfsX, memOfsY });
+        reader.close();
+        final boolean[][] isSet = new boolean[10][10];
+        for (int i = 0; i < arrayWritten.size(0); ++i)
+        {
+            for (int j = 0; j < arrayWritten.size(1); ++j)
+            {
+                isSet[memOfsX + i][memOfsY + j] = true;
+                assertEquals("(" + i + "," + j + ")", arrayWritten.get(i, j),
+                        arrayRead.get(memOfsX + i, memOfsY + j));
+            }
+        }
+        for (int i = 0; i < arrayRead.size(0); ++i)
+        {
+            for (int j = 0; j < arrayRead.size(1); ++j)
+            {
+                if (!isSet[i][j])
+                {
+                    assertEquals("(" + i + "," + j + ")", 0f, arrayRead.get(i, j));
+                }
+            }
+        }
+    }
+
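+    // With a block size of 10, data set sizes of 99, 100 and 101 cover a partial last
+    // block, an exact multiple of the block size and a single-element overhang.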
+    @DataProvider
+    private Object[][] provideSizes()
+    {
+        return new Object[][]
+            {
+                { 10, 99 },
+                { 10, 100 },
+                { 10, 101 } };
+    }
+
+    @Test(dataProvider = "provideSizes")
+    public void testIterateOverFloatArrayInNaturalBlocks(int blockSize, int dataSetSize)
+    {
+        final File datasetFile =
+                new File(workingDirectory, "iterateOverFloatArrayInNaturalBlocks.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final float[] arrayWritten = new float[dataSetSize];
+        for (int i = 0; i < dataSetSize; ++i)
+        {
+            arrayWritten[i] = i;
+        }
+        writer.float32().createArray(dsName, dataSetSize, blockSize);
+        writer.float32().writeArrayBlock(dsName, arrayWritten, 0);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        int i = 0;
+        for (HDF5DataBlock<float[]> block : reader.float32().getArrayNaturalBlocks(dsName))
+        {
+            assertEquals(i, block.getIndex());
+            assertEquals(blockSize * i, block.getOffset());
+            final float[] arrayReadBlock = block.getData();
+            if (blockSize * (i + 1) > dataSetSize)
+            {
+                assertEquals(dataSetSize - i * blockSize, arrayReadBlock.length);
+            } else
+            {
+                assertEquals(blockSize, arrayReadBlock.length);
+            }
+            final float[] arrayWrittenBlock = new float[arrayReadBlock.length];
+            System.arraycopy(arrayWritten, (int) block.getOffset(), arrayWrittenBlock, 0,
+                    arrayWrittenBlock.length);
+            assertTrue(Arrays.equals(arrayWrittenBlock, arrayReadBlock));
+            ++i;
+        }
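+        // Expect one extra (partial) block when the block size does not divide the
+        // data set size evenly.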
+        assertEquals(dataSetSize / blockSize + (dataSetSize % blockSize != 0 ? 1 : 0), i);
+        reader.close();
+    }
+
+    @Test
+    public void testReadToFloatMDArrayBlockWithOffset()
+    {
+        final File datasetFile = new File(workingDirectory, "readToFloatMDArrayBlockWithOffset.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final MDFloatArray arrayWritten = new MDFloatArray(new float[]
+            { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, new int[]
+            { 3, 3 });
+        writer.float32().writeMDArray(dsName, arrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final MDFloatArray arrayRead = new MDFloatArray(new int[]
+            { 10, 10 });
+        final int memOfsX = 2;
+        final int memOfsY = 3;
+        final int diskOfsX = 1;
+        final int diskOfsY = 0;
+        final int blockSizeX = 3;
+        final int blockSizeY = 2;
+        final int[] effectiveDimensions =
+                reader.float32().readToMDArrayBlockWithOffset(dsName, arrayRead, new int[]
+                    { blockSizeX, blockSizeY }, new long[]
+                    { diskOfsX, diskOfsY }, new int[]
+                    { memOfsX, memOfsY });
+        reader.close();
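+        // From disk offset 1 only two of the requested three rows exist in the 3x3 data
+        // set, so the effective block dimensions are clipped to { 2, 2 }.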
+        assertEquals(blockSizeX - 1, effectiveDimensions[0]);
+        assertEquals(blockSizeY, effectiveDimensions[1]);
+        final boolean[][] isSet = new boolean[10][10];
+        for (int i = 0; i < effectiveDimensions[0]; ++i)
+        {
+            for (int j = 0; j < effectiveDimensions[1]; ++j)
+            {
+                isSet[memOfsX + i][memOfsY + j] = true;
+                assertEquals("(" + i + "," + j + ")", arrayWritten.get(diskOfsX + i, diskOfsY + j),
+                        arrayRead.get(memOfsX + i, memOfsY + j));
+            }
+        }
+        for (int i = 0; i < arrayRead.size(0); ++i)
+        {
+            for (int j = 0; j < arrayRead.size(1); ++j)
+            {
+                if (!isSet[i][j])
+                {
+                    assertEquals("(" + i + "," + j + ")", 0f, arrayRead.get(i, j));
+                }
+            }
+        }
+    }
+
+    @Test
+    public void testReadToTimeDurationMDArrayBlockWithOffset()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "readToTimeDurationMDArrayBlockWithOffset.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final HDF5TimeDurationMDArray arrayWritten = new HDF5TimeDurationMDArray(new long[]
+            { 1, 1, 1, 1, 1, 1, 1, 1, 1 }, new int[]
+            { 3, 3 }, HDF5TimeUnit.MINUTES);
+        writer.duration().writeMDArray(dsName, arrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5TimeDurationMDArray arrayRead = new HDF5TimeDurationMDArray(new int[]
+            { 10, 10 }, HDF5TimeUnit.SECONDS);
+        final int memOfsX = 2;
+        final int memOfsY = 3;
+        final int diskOfsX = 1;
+        final int diskOfsY = 0;
+        final int blockSizeX = 3;
+        final int blockSizeY = 2;
+        final int[] effectiveDimensions =
+                reader.duration().readToMDArrayBlockWithOffset(dsName, arrayRead, new int[]
+                    { blockSizeX, blockSizeY }, new long[]
+                    { diskOfsX, diskOfsY }, new int[]
+                    { memOfsX, memOfsY });
+        reader.close();
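+        // As above, the 3x3 dataset truncates the block to blockSizeX - 1 rows in dimension 0.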
+        assertEquals(blockSizeX - 1, effectiveDimensions[0]);
+        assertEquals(blockSizeY, effectiveDimensions[1]);
+        final boolean[][] isSet = new boolean[10][10];
+        for (int i = 0; i < effectiveDimensions[0]; ++i)
+        {
+            for (int j = 0; j < effectiveDimensions[1]; ++j)
+            {
+                isSet[memOfsX + i][memOfsY + j] = true;
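+                // Written in MINUTES, read back in SECONDS, hence the factor of 60.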
+                assertEquals("(" + i + "," + j + ")",
+                        60 * arrayWritten.get(diskOfsX + i, diskOfsY + j),
+                        arrayRead.get(memOfsX + i, memOfsY + j));
+            }
+        }
+        for (int i = 0; i < arrayRead.size(0); ++i)
+        {
+            for (int j = 0; j < arrayRead.size(1); ++j)
+            {
+                if (!isSet[i][j])
+                {
+                    assertEquals("(" + i + "," + j + ")", 0, arrayRead.get(i, j));
+                }
+            }
+        }
+    }
+
+    @Test(dataProvider = "provideSizes")
+    public void testIterateOverStringArrayInNaturalBlocks(int blockSize, int dataSetSize)
+    {
+        final File datasetFile =
+                new File(workingDirectory, "testIterateOverStringArrayInNaturalBlocks.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final String[] arrayWritten = new String[dataSetSize];
+        for (int i = 0; i < dataSetSize; ++i)
+        {
+            arrayWritten[i] = "" + i;
+        }
+        writer.string().createArray(dsName, dataSetSize, blockSize);
+        writer.string().writeArrayBlock(dsName, arrayWritten, 0);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        int i = 0;
+        for (HDF5DataBlock<String[]> block : reader.string().getArrayNaturalBlocks(dsName))
+        {
+            assertEquals(i, block.getIndex());
+            assertEquals(blockSize * i, block.getOffset());
+            final String[] arrayReadBlock = block.getData();
+            if (blockSize * (i + 1) > dataSetSize)
+            {
+                assertEquals(dataSetSize - i * blockSize, arrayReadBlock.length);
+            } else
+            {
+                assertEquals(blockSize, arrayReadBlock.length);
+            }
+            final String[] arrayWrittenBlock = new String[arrayReadBlock.length];
+            System.arraycopy(arrayWritten, (int) block.getOffset(), arrayWrittenBlock, 0,
+                    arrayWrittenBlock.length);
+            assertTrue(Arrays.equals(arrayWrittenBlock, arrayReadBlock));
+            ++i;
+        }
+        assertEquals(dataSetSize / blockSize + (dataSetSize % blockSize != 0 ? 1 : 0), i);
+        reader.close();
+    }
+
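+    /**
+     * Each row provides: the block (chunk) dimensions, the dataset dimensions, the first element
+     * of every natural block in iteration order, and the dimensions of every natural block.
+     */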
+    @DataProvider
+    private Object[][] provideMDSizes()
+    {
+        return new Object[][]
+            {
+                { new int[]
+                    { 2, 2 }, new long[]
+                    { 4, 3 }, new float[]
+                    { 0f, 2f, 6f, 8f }, new int[][]
+                    {
+                        { 2, 2 },
+                        { 2, 1 },
+                        { 2, 2 },
+                        { 2, 1 } } },
+                { new int[]
+                    { 2, 2 }, new long[]
+                    { 4, 4 }, new float[]
+                    { 0f, 2f, 8f, 10f }, new int[][]
+                    {
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 2, 2 } } },
+                { new int[]
+                    { 2, 2 }, new long[]
+                    { 4, 5 }, new float[]
+                    { 0f, 2f, 4f, 10f, 12f, 14f }, new int[][]
+                    {
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 2, 1 },
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 2, 1 } } },
+                { new int[]
+                    { 3, 2 }, new long[]
+                    { 5, 4 }, new float[]
+                    { 0f, 2f, 12f, 14f }, new int[][]
+                    {
+                        { 3, 2 },
+                        { 3, 2 },
+                        { 2, 2 },
+                        { 2, 2 } } },
+                { new int[]
+                    { 2, 2 }, new long[]
+                    { 5, 4 }, new float[]
+                    { 0f, 2f, 8f, 10f, 16f, 18f }, new int[][]
+                    {
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 2, 2 },
+                        { 1, 2 },
+                        { 1, 2 } } }, };
+    }
+
+    @Test(dataProvider = "provideMDSizes")
+    public void testIterateOverMDFloatArrayInNaturalBlocks(int[] blockSize, long[] dataSetSize,
+            float[] firstNumberPerIteration, int[][] blockSizePerIteration)
+    {
+        final File datasetFile =
+                new File(workingDirectory, "iterateOverMDFloatArrayInNaturalBlocks.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final float[] flattenedArray = new float[getNumberOfElements(dataSetSize)];
+        for (int i = 0; i < flattenedArray.length; ++i)
+        {
+            flattenedArray[i] = i;
+        }
+        final MDFloatArray arrayWritten = new MDFloatArray(flattenedArray, dataSetSize);
+        writer.float32().createMDArray(dsName, dataSetSize, blockSize);
+        writer.float32().writeMDArrayBlock(dsName, arrayWritten, new long[blockSize.length]);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        int i = 0;
+        for (HDF5MDDataBlock<MDFloatArray> block : reader.float32().getMDArrayNaturalBlocks(dsName))
+        {
+            assertEquals(firstNumberPerIteration[i], block.getData().get(0, 0));
+            assertTrue(Arrays.equals(block.getData().dimensions(), blockSizePerIteration[i]));
+            ++i;
+        }
+        assertEquals(firstNumberPerIteration.length, i);
+        reader.close();
+    }
+
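+    /** Returns the total number of elements, i.e. the product of all dimensions. */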
+    private static int getNumberOfElements(long[] size)
+    {
+        int elements = 1;
+        for (long dim : size)
+        {
+            elements *= dim;
+        }
+        return elements;
+    }
+
+    @Test
+    public void testStringArray()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArray.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final String[] data = new String[]
+            { "abc", "ABCxxx", "xyz" };
+        final String dataSetName = "/aStringArray";
+        writer.writeStringArray(dataSetName, data);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        final String[] dataStored = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(data, dataStored));
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayUTF8()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArrayUTF8.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(stringArrayFile).useUTF8CharacterEncoding()
+                        .writer();
+        final String[] data = new String[]
+            { "abc", "ABCxxx", "\u00b6\u00bc\u09ab" };
+        final String dataSetName = "/aStringArray";
+        writer.writeStringArray(dataSetName, data);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        final String[] dataStored = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(data, dataStored));
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayUTF8WithZeroChar()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArrayUTF8WithZeroChar.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(stringArrayFile).useUTF8CharacterEncoding()
+                        .writer();
+        final String[] data = new String[]
+            { "abc", "ABCxxx", "\u00b6\000\u00bc\u09ab" };
+        final String dataSetName = "/aStringArray";
+        writer.writeStringArray(dataSetName, data);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        final String[] dataStored = reader.string().readArrayRaw(dataSetName);
+        assertEquals(3, dataStored.length);
+        assertEquals(StringUtils.rightPad("abc", 8, "\0"), dataStored[0]);
+        assertEquals(StringUtils.rightPad("ABCxxx", 8, "\0"), dataStored[1]);
+        assertEquals("\u00b6\000\u00bc\u09ab", dataStored[2]);
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayBlock()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArrayBlock.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
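+        // Strings with embedded \0 characters: readArray() truncates at the first \0, while
+        // readArrayRaw() returns the full fixed-length, \0-padded value.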
+        final String[] data = new String[]
+            { "abc\0A", "ABCxxx\0" + "1", "xyz\0;" };
+        final String[] dataZeroTerm = zeroTerm(data);
+        final String[] dataPadded = pad(data, 8);
+        final String emptyPadded = StringUtils.rightPad("", 8, '\0');
+        final String dataSetName = "/aStringArray";
+        writer.string().createArray(dataSetName, 8, 5, 3);
+        writer.string().writeArrayBlock(dataSetName, data, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        String[] dataStored = reader.string().readArray(dataSetName);
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "", dataZeroTerm[0], dataZeroTerm[1], dataZeroTerm[2] }, dataStored));
+
+        dataStored = reader.string().readArrayRaw(dataSetName);
+        assertTrue(Arrays.equals(new String[]
+            { emptyPadded, emptyPadded, emptyPadded, dataPadded[0], dataPadded[1], dataPadded[2] },
+                dataStored));
+
+        dataStored = reader.string().readArrayBlock(dataSetName, 3, 0);
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "" }, dataStored));
+        dataStored = reader.string().readArrayBlockRaw(dataSetName, 3, 0);
+        assertTrue(Arrays.equals(new String[]
+            { emptyPadded, emptyPadded, emptyPadded }, dataStored));
+        dataStored = reader.string().readArrayBlock(dataSetName, 3, 1);
+        assertTrue(Arrays.equals(dataZeroTerm, dataStored));
+        dataStored = reader.string().readArrayBlockRaw(dataSetName, 3, 1);
+        assertTrue(Arrays.equals(dataPadded, dataStored));
+        dataStored = reader.string().readArrayBlockWithOffset(dataSetName, 3, 2);
+        assertTrue(Arrays.equals(new String[]
+            { "", dataZeroTerm[0], dataZeroTerm[1] }, dataStored));
+        dataStored = reader.string().readArrayBlockWithOffsetRaw(dataSetName, 3, 2);
+        assertTrue(Arrays.equals(new String[]
+            { emptyPadded, dataPadded[0], dataPadded[1] }, dataStored));
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayBlockCompact()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArrayBlockCompact.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final String[] data = new String[]
+            { "abc", "ABCxxx", "xyz" };
+        final String dataSetName = "/aStringArray";
+        writer.string().createArray(dataSetName, 6, 6, HDF5GenericStorageFeatures.GENERIC_COMPACT);
+        writer.string().writeArrayBlock(dataSetName, data, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        String[] dataStored = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "", data[0], data[1], data[2] }, dataStored));
+        dataStored = reader.string().readArrayBlock(dataSetName, 3, 0);
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "" }, dataStored));
+        dataStored = reader.string().readArrayBlock(dataSetName, 3, 1);
+        assertTrue(Arrays.equals(data, dataStored));
+        dataStored = reader.string().readArrayBlockWithOffset(dataSetName, 3, 2);
+        assertTrue(Arrays.equals(new String[]
+            { "", data[0], data[1] }, dataStored));
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayBlockVL()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArrayBlockVL.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final String[] data1 = new String[]
+            { "abc", "ABCxxx", "xyz" };
+        final String[] data2 = new String[]
+            { "abd", "ABDxxx", "xyw" };
+        final String[] data = new String[]
+            { "", "", "", "abc", "ABCxxx", "xyz", "abd", "ABDxxx", "xyw" };
+        final String dataSetName = "/aStringArray";
+        writer.string().createArrayVL(dataSetName, 0, 5);
+        writer.string().writeArrayBlock(dataSetName, data1, 1);
+        writer.string().writeArrayBlock(dataSetName, data2, 2);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        String[] dataRead = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(data, dataRead));
+        dataRead = reader.string().readArrayBlock(dataSetName, 3, 1);
+        assertTrue(Arrays.equals(data1, dataRead));
+        dataRead = reader.string().readArrayBlock(dataSetName, 3, 2);
+        assertTrue(Arrays.equals(data2, dataRead));
+        dataRead = reader.string().readArrayBlockWithOffset(dataSetName, 3, 5);
+        assertTrue(Arrays.equals(new String[]
+            { "xyz", "abd", "ABDxxx" }, dataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayMD()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringMDArray.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final MDArray<String> data = new MDArray<String>(new String[]
+            { "abc", "ABCxxx", "xyz", "DEF" }, new long[]
+            { 2, 2 });
+        final String dataSetName = "/aStringArray";
+        writer.string().writeMDArray(dataSetName, data);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        final MDArray<String> dataStored = reader.string().readMDArray(dataSetName);
+        assertTrue(Arrays.equals(data.getAsFlatArray(), dataStored.getAsFlatArray()));
+        assertTrue(Arrays.equals(data.dimensions(), dataStored.dimensions()));
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayMDBlocks()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringMDArrayBlocks.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final String dataSetName = "/aStringArray";
+        writer.string().createMDArray(dataSetName, 8, new long[]
+            { 4, 4 }, new int[]
+            { 2, 2 });
+        final MDArray<String> data = new MDArray<String>(new String[]
+            { "abc", "ABCxxx\0" + 1, "xyz\0;", "DEF\0" + 8 }, new long[]
+            { 2, 2 });
+        final MDArray<String> dataZeroTerm =
+                new MDArray<String>(zeroTerm(data.getAsFlatArray()), data.dimensions());
+        final MDArray<String> dataPadded =
+                new MDArray<String>(pad(data.getAsFlatArray(), 8), data.dimensions());
+        for (int i = 0; i < 2; ++i)
+        {
+            for (int j = 0; j < 2; ++j)
+            {
+                writer.string().writeMDArrayBlock(dataSetName, data, new long[]
+                    { i, j });
+            }
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        int i = 0;
+        int j = 0;
+        for (HDF5MDDataBlock<MDArray<String>> block : reader.string().getMDArrayNaturalBlocks(
+                dataSetName))
+        {
+            assertTrue(Arrays.equals(dataZeroTerm.getAsFlatArray(), block.getData()
+                    .getAsFlatArray()));
+            assertTrue(Arrays.equals(dataZeroTerm.dimensions(), block.getData().dimensions()));
+            assertTrue(Arrays.equals(new long[]
+                { i, j }, block.getIndex()));
+            if (++j > 1)
+            {
+                j = 0;
+                ++i;
+            }
+        }
+
+        i = 0;
+        j = 0;
+        for (HDF5MDDataBlock<MDArray<String>> block : reader.string().getMDArrayNaturalBlocksRaw(
+                dataSetName))
+        {
+            assertTrue(Arrays.equals(dataPadded.getAsFlatArray(), block.getData().getAsFlatArray()));
+            assertTrue(Arrays.equals(dataPadded.dimensions(), block.getData().dimensions()));
+            assertTrue(Arrays.equals(new long[]
+                { i, j }, block.getIndex()));
+            if (++j > 1)
+            {
+                j = 0;
+                ++i;
+            }
+        }
+        reader.close();
+    }
+
+    private String[] pad(String[] data, int len)
+    {
+        final String[] result = new String[data.length];
+        for (int i = 0; i < result.length; ++i)
+        {
+            result[i] = StringUtils.rightPad(data[i], len, '\0');
+        }
+        return result;
+    }
+
+    private String[] zeroTerm(String[] data)
+    {
+        final String[] result = new String[data.length];
+        for (int i = 0; i < result.length; ++i)
+        {
+            result[i] = zeroTerm(data[i]);
+        }
+        return result;
+    }
+
+    private String zeroTerm(String s)
+    {
+        int idx = s.indexOf('\0');
+        if (idx < 0)
+        {
+            return s;
+        } else
+        {
+            return s.substring(0, idx);
+        }
+    }
+
+    @Test
+    public void testStringMDArrayVL()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringMDArrayVL.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final MDArray<String> data = new MDArray<String>(new String[]
+            { "abc", "ABCxxx", "xyz", "DEF" }, new long[]
+            { 2, 2 });
+        final String dataSetName = "/aStringArray";
+        writer.string().writeMDArrayVL(dataSetName, data);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        final MDArray<String> dataStored = reader.string().readMDArray(dataSetName);
+        assertTrue(Arrays.equals(data.getAsFlatArray(), dataStored.getAsFlatArray()));
+        assertTrue(Arrays.equals(data.dimensions(), dataStored.dimensions()));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(dataSetName);
+        reader.close();
+        assertTrue(info.getTypeInformation().isVariableLengthString());
+        assertEquals("STRING(-1)", info.getTypeInformation().toString());
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        assertTrue(Arrays.toString(info.getDimensions()), Arrays.equals(new long[]
+            { 2, 2 }, info.getDimensions()));
+        assertTrue(Arrays.toString(info.getMaxDimensions()), Arrays.equals(new long[]
+            { -1, -1 }, info.getMaxDimensions()));
+        assertTrue(Arrays.toString(info.tryGetChunkSizes()),
+                Arrays.equals(MDArray.toInt(info.getDimensions()), info.tryGetChunkSizes()));
+    }
+
+    @Test
+    public void testStringMDArrayVLBlocks()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringMDArrayVLBlocks.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        final long[] dims = new long[]
+            { 8, 8 };
+        final int[] blockSize = new int[]
+            { 2, 2 };
+        final MDArray<String> data = new MDArray<String>(new String[]
+            { "abc", "ABCxxx", "xyz", "DEF" }, blockSize);
+        final String dataSetName = "/aStringArray";
+        writer.string().createMDArrayVL(dataSetName, dims, blockSize);
+        writer.string().writeMDArrayBlock(dataSetName, data, new long[]
+            { 1, 1 });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        final MDArray<String> dataStored =
+                reader.string().readMDArrayBlock(dataSetName, blockSize, new long[]
+                    { 1, 1 });
+        assertTrue(Arrays.equals(data.getAsFlatArray(), dataStored.getAsFlatArray()));
+        assertTrue(Arrays.equals(data.dimensions(), dataStored.dimensions()));
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "", "" }, reader.string().readMDArrayBlock(dataSetName, blockSize, new long[]
+            { 1, 0 }).getAsFlatArray()));
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "", "" }, reader.string().readMDArrayBlock(dataSetName, blockSize, new long[]
+            { 0, 1 }).getAsFlatArray()));
+        assertTrue(Arrays.equals(new String[]
+            { "", "", "", "" }, reader.string().readMDArrayBlock(dataSetName, blockSize, new long[]
+            { 2, 2 }).getAsFlatArray()));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(dataSetName);
+        reader.close();
+        assertTrue(info.getTypeInformation().isVariableLengthString());
+        assertEquals("STRING(-1)", info.getTypeInformation().toString());
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        assertTrue(Arrays.equals(dims, info.getDimensions()));
+        assertTrue(Arrays.equals(new long[]
+            { -1, -1 }, info.getMaxDimensions()));
+        assertTrue(Arrays.equals(blockSize, info.tryGetChunkSizes()));
+    }
+
+    @Test
+    public void testOverwriteString()
+    {
+        final File stringOverwriteFile = new File(workingDirectory, "overwriteString.h5");
+        stringOverwriteFile.delete();
+        assertFalse(stringOverwriteFile.exists());
+        stringOverwriteFile.deleteOnExit();
+        IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(stringOverwriteFile)
+                        .dontUseExtendableDataTypes().writer();
+        final String largeData = StringUtils.repeat("a", 12);
+        final String smallData = "abc1234";
+        final String dataSetName = "/aString";
+        writer.string().write(dataSetName, smallData);
+        writer.string().write(dataSetName, largeData);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringOverwriteFile);
+        final String dataRead = reader.readString(dataSetName);
+        assertEquals(largeData, dataRead);
+        reader.close();
+    }
+
+    @Test
+    public void testOverwriteStringWithLarge()
+    {
+        final File stringOverwriteFile = new File(workingDirectory, "overwriteStringWithLarge.h5");
+        stringOverwriteFile.delete();
+        assertFalse(stringOverwriteFile.exists());
+        stringOverwriteFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().configure(stringOverwriteFile).writer();
+        final String largeData = StringUtils.repeat("a", 64 * 1024);
+        final String smallData = "abc1234";
+        final String dataSetName = "/aString";
+        writer.string().write(dataSetName, smallData);
+        writer.string().write(dataSetName, largeData);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringOverwriteFile);
+        final String dataRead = reader.readString(dataSetName);
+        assertEquals(largeData, dataRead);
+        reader.close();
+    }
+
+    @Test
+    public void testOverwriteStringWithLargeKeepCompact()
+    {
+        final File stringOverwriteFile =
+                new File(workingDirectory, "overwriteStringWithLargeKeepCompact.h5");
+        stringOverwriteFile.delete();
+        assertFalse(stringOverwriteFile.exists());
+        stringOverwriteFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().configure(stringOverwriteFile).writer();
+        final String largeData = StringUtils.repeat("a", 64 * 1024);
+        final String smallData = "abc1234";
+        final String dataSetName = "/aString";
+        writer.string().write(dataSetName, smallData);
+        writer.string().write(dataSetName, largeData,
+                HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS_KEEP);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringOverwriteFile);
+        final String dataRead = reader.readString(dataSetName);
+        assertEquals(largeData.substring(0, smallData.length()), dataRead);
+        assertEquals(HDF5StorageLayout.COMPACT, reader.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testSmallString()
+    {
+        final File smallStringFile = new File(workingDirectory, "smallString.h5");
+        smallStringFile.delete();
+        assertFalse(smallStringFile.exists());
+        smallStringFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().configure(smallStringFile).writer();
+        final String dataSetName = "/aString";
+        writer.string().write(dataSetName, "abc");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(smallStringFile);
+        final String dataRead = reader.readString(dataSetName);
+        assertEquals("abc", dataRead);
+        assertEquals(HDF5StorageLayout.COMPACT, reader.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testVeryLargeString()
+    {
+        final File veryLargeStringFile = new File(workingDirectory, "veryLargeString.h5");
+        veryLargeStringFile.delete();
+        assertFalse(veryLargeStringFile.exists());
+        veryLargeStringFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().configure(veryLargeStringFile).writer();
+        final String largeData = StringUtils.repeat("a", 64 * 1024);
+        final String dataSetName = "/aString";
+        writer.string().write(dataSetName, largeData);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(veryLargeStringFile);
+        final String dataRead = reader.readString(dataSetName);
+        assertEquals(largeData, dataRead);
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, reader.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        reader.close();
+    }
+
+    @Test
+    public void testReadStringAsByteArray()
+    {
+        final File file = new File(workingDirectory, "readStringAsByteArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().write("a", "abc");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final byte[] b = r.readAsByteArray("a");
+        assertEquals("abc", new String(b));
+        r.close();
+    }
+
+    @Test
+    public void testReadStringVLAsByteArray()
+    {
+        final File file = new File(workingDirectory, "readStringVLAsByteArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().writeVL("a", "abc");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final byte[] b = r.readAsByteArray("a");
+        assertEquals("abc", new String(b));
+        r.close();
+    }
+
+    @Test
+    public void testReadStringAttributeAsByteArray()
+    {
+        final File file = new File(workingDirectory, "readStringAttributeAsByteArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setAttr("/", "a", "abc");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final byte[] b = r.opaque().getArrayAttr("/", "a");
+        assertEquals("abc", new String(b));
+        r.close();
+    }
+
+    @Test
+    public void testStringAttributeFixedLength()
+    {
+        final File file = new File(workingDirectory, "stringAttributeFixedLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setAttr("/", "a", "a\0c");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final String b = r.string().getAttrRaw("/", "a");
+        assertEquals("a\0c", b);
+        r.close();
+    }
+
+    @Test
+    public void testStringAttributeLength0()
+    {
+        final File file = new File(workingDirectory, "stringAttributeLength0.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setAttr("/", "a", "");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final String b = r.string().getAttr("/", "a");
+        assertEquals("", b);
+        r.close();
+    }
+
+    @Test
+    public void testStringAttributeFixedLengthExplicitlySaveLength()
+    {
+        final File file =
+                new File(workingDirectory, "stringAttributeFixedLengthExplicitlySaveLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setAttr("/", "a", "a\0c");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        assertEquals("a\0c", r.string().getAttrRaw("/", "a"));
+        assertEquals("a", r.string().getAttr("/", "a"));
+        r.close();
+    }
+
+    @Test
+    public void testStringAttributeFixedLengthOverwriteWithShorter()
+    {
+        final File file =
+                new File(workingDirectory, "stringAttributeFixedLengthOverwriteWithShorter.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setAttr("/", "a", "abcdef");
+        // This will delete the old attribute and write a new one with length 3.
+        w.string().setAttr("/", "a", "ghi");
+        w.string().setAttr("/", "b", "abcdef", 6);
+        // This will keep the old attribute (of length 6) and just overwrite its value.
+        w.string().setAttr("/", "b", "jkl", 6);
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        assertEquals("ghi", r.string().getAttrRaw("/", "a"));
+        assertEquals("jkl\0\0\0", r.string().getAttrRaw("/", "b"));
+        r.close();
+    }
+
+    @Test
+    public void testStringAttributeUTF8FixedLength()
+    {
+        final File file = new File(workingDirectory, "stringAttributeUTF8FixedLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        w.string().setAttr("/", "a", "a\0c");
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        assertEquals("a\0c", r.string().getAttrRaw("/", "a"));
+        r.close();
+    }
+
+    @Test
+    public void testStringArrayAttributeLengthFitsValue()
+    {
+        final File file = new File(workingDirectory, "stringArrayAttributeLengthFitsValue.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setArrayAttr("/", "a", new String[]
+            { "12", "a\0c", "QWERTY" });
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final String[] s0Term = r.string().getArrayAttr("/", "a");
+        final String[] sFixed = r.string().getArrayAttrRaw("/", "a");
+        assertTrue(Arrays.equals(new String[]
+            { "12", "a", "QWERTY" }, s0Term));
+        assertTrue(Arrays.equals(new String[]
+            { "12\0\0\0\0", "a\0c\0\0\0", "QWERTY" }, sFixed));
+        r.close();
+    }
+
+    @Test
+    public void testStringArrayAttributeFixedLength()
+    {
+        final File file = new File(workingDirectory, "stringArrayAttributeFixedLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        w.string().setArrayAttr("/", "a", new String[]
+            { "12", "a\0c", "QWERTY" }, 7);
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final String[] s0Term = r.string().getArrayAttr("/", "a");
+        final String[] sFixed = r.string().getArrayAttrRaw("/", "a");
+        assertTrue(Arrays.equals(new String[]
+            { "12", "a", "QWERTY" }, s0Term));
+        assertTrue(Arrays.equals(new String[]
+            { "12\0\0\0\0\0", "a\0c\0\0\0\0", "QWERTY\0" }, sFixed));
+        r.close();
+    }
+
+    @Test
+    public void testStringArrayAttributeUTF8LengthFitsValue()
+    {
+        final File file = new File(workingDirectory, "stringArrayAttributeUTF8LengthFitsValue.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        w.string().setArrayAttr("/", "a", new String[]
+            { "12", "a\0c", "QWERTY" });
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final String[] b = r.string().getArrayAttrRaw("/", "a");
+        assertTrue(Arrays.equals(new String[]
+            { "12\0\0\0\0", "a\0c\0\0\0", "QWERTY" }, b));
+        r.close();
+    }
+
+    @Test
+    public void testStringArrayAttributeUTF8FixedLength()
+    {
+        final File file = new File(workingDirectory, "stringArrayAttributeUTF8FixedLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        w.string().setArrayAttr("/", "a", new String[]
+            { "12", "a\0c", "QWERTY" }, 7);
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final String[] sFixed = r.string().getArrayAttrRaw("/", "a");
+        final String[] s0Term = r.string().getArrayAttr("/", "a");
+        assertTrue(Arrays.equals(new String[]
+            { StringUtils.rightPad("12", 7 * 4, '\0'), StringUtils.rightPad("a\0c", 7 * 4, '\0'),
+                    StringUtils.rightPad("QWERTY", 7 * 4, '\0') }, sFixed));
+        assertTrue(Arrays.equals(new String[]
+            { "12", "a", "QWERTY" }, s0Term));
+        r.close();
+    }
+
+    @Test
+    public void testStringMDArrayAttributeFixedLength()
+    {
+        final File file = new File(workingDirectory, "stringMDArrayAttributeFixedLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.open(file);
+        final MDArray<String> array = new MDArray<String>(new String[]
+            { "12", "a\0c", "QWERTY", "" }, new int[]
+            { 2, 2 });
+        w.string().setMDArrayAttr("/", "a", array, 7);
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final MDArray<String> s0Term = r.string().getMDArrayAttr("/", "a");
+        final MDArray<String> sFixed = r.string().getMDArrayAttrRaw("/", "a");
+        assertEquals(new MDArray<String>(new String[]
+            { "12", "a", "QWERTY", "" }, new int[]
+            { 2, 2 }), s0Term);
+        assertEquals(new MDArray<String>(new String[]
+            { "12\0\0\0\0\0", "a\0c\0\0\0\0", "QWERTY\0", "\0\0\0\0\0\0\0" }, new int[]
+            { 2, 2 }), sFixed);
+        r.close();
+    }
+
+    @Test
+    public void testStringMDArrayAttributeUTF8LengthFitsValue()
+    {
+        final File file =
+                new File(workingDirectory, "stringMDArrayAttributeUTF8LengthFitsValue.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        final MDArray<String> array = new MDArray<String>(new String[]
+            { "\u00b6\u00bc\u09ab", "a\0c", "QWERTY", "" }, new int[]
+            { 2, 2 });
+        w.string().setMDArrayAttr("/", "a", array);
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final MDArray<String> b1 = r.string().getMDArrayAttr("/", "a");
+        assertEquals(new MDArray<String>(new String[]
+            { "\u00b6\u00bc\u09ab", "a", "QWERTY", "" }, new int[]
+            { 2, 2 }), b1);
+        final MDArray<String> b2 = r.string().getMDArrayAttrRaw("/", "a");
+        assertEquals(new MDArray<String>(new String[]
+            { "\u00b6\u00bc\u09ab", "a\0c\0\0\0\0", "QWERTY\0", "\0\0\0\0\0\0\0" }, new int[]
+            { 2, 2 }), b2);
+        r.close();
+    }
+
+    @Test
+    public void testStringMDArrayAttributeUTF8FixedLength()
+    {
+        final File file = new File(workingDirectory, "stringMDArrayAttributeUTF8FixedLength.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer w = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        final MDArray<String> array = new MDArray<String>(new String[]
+            { "\u00b6\u00bc\u09ab", "a\0c", "QWERTY", "" }, new int[]
+            { 2, 2 });
+        w.string().setMDArrayAttr("/", "a", array, 7);
+        w.close();
+        final IHDF5Reader r = HDF5Factory.openForReading(file);
+        final MDArray<String> b1 = r.string().getMDArrayAttr("/", "a");
+        assertEquals(new MDArray<String>(new String[]
+            { "\u00b6\u00bc\u09ab", "a", "QWERTY", "" }, new int[]
+            { 2, 2 }), b1);
+        final MDArray<String> b2 = r.string().getMDArrayAttrRaw("/", "a");
+        // Note: the raw value of the first string occupies 7 * 4 = 28 bytes, but its 3 characters
+        // need 7 bytes in UTF-8, so the decoded string has only 28 - (7 - 3) = 24 characters.
+        assertEquals(
+                new MDArray<String>(new String[]
+                    { StringUtils.rightPad("\u00b6\u00bc\u09ab", 7 * 4 - 4, '\0'),
+                            StringUtils.rightPad("a\0c", 7 * 4, '\0'),
+                            StringUtils.rightPad("QWERTY", 7 * 4, '\0'),
+                            StringUtils.rightPad("", 7 * 4, '\0') }, new int[]
+                    { 2, 2 }), b2);
+        r.close();
+    }
+
+    @Test
+    public void testStringCompact()
+    {
+        final File stringCompactFile = new File(workingDirectory, "stringCompact.h5");
+        stringCompactFile.delete();
+        assertFalse(stringCompactFile.exists());
+        stringCompactFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().configure(stringCompactFile).writer();
+        final String smallData = "abc1234";
+        final String dataSetName1 = "/aString";
+        writer.string().write(dataSetName1, smallData, HDF5GenericStorageFeatures.GENERIC_COMPACT);
+        final String dataSetName2 = "/anotherString";
+        final String largeData = StringUtils.repeat("a", 64 * 1024 - 13);
+        writer.string().write(dataSetName2, largeData, HDF5GenericStorageFeatures.GENERIC_COMPACT);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringCompactFile);
+        final String dataRead1 = reader.readString(dataSetName1);
+        assertEquals(HDF5StorageLayout.COMPACT, reader.getDataSetInformation(dataSetName1)
+                .getStorageLayout());
+        assertEquals(smallData, dataRead1);
+        final String dataRead2 = reader.readString(dataSetName2);
+        assertEquals(HDF5StorageLayout.COMPACT, reader.getDataSetInformation(dataSetName2)
+                .getStorageLayout());
+        assertEquals(largeData, dataRead2);
+        reader.close();
+    }
+
+    @Test
+    public void testStringContiguous()
+    {
+        final File stringCompactFile = new File(workingDirectory, "stringContiguous.h5");
+        stringCompactFile.delete();
+        assertFalse(stringCompactFile.exists());
+        stringCompactFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().configure(stringCompactFile).writer();
+        final String smallData = "abc1234";
+        final String dataSetName1 = "/aString";
+        writer.string().write(dataSetName1, smallData,
+                HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS);
+        final String dataSetName2 = "/anotherString";
+        final String largeData = StringUtils.repeat("a", 64 * 1024 - 13);
+        writer.string().write(dataSetName2, largeData,
+                HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringCompactFile);
+        final String dataRead1 = reader.readString(dataSetName1);
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, reader.getDataSetInformation(dataSetName1)
+                .getStorageLayout());
+        assertEquals(smallData, dataRead1);
+        final String dataRead2 = reader.readString(dataSetName2);
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, reader.getDataSetInformation(dataSetName2)
+                .getStorageLayout());
+        assertEquals(largeData, dataRead2);
+        reader.close();
+    }
+
+    @Test
+    public void testStringUnicode() throws Exception
+    {
+        final File stringUnicodeFile = new File(workingDirectory, "stringUnicode.h5");
+        stringUnicodeFile.delete();
+        assertFalse(stringUnicodeFile.exists());
+        stringUnicodeFile.deleteOnExit();
+        IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(stringUnicodeFile).dontUseExtendableDataTypes()
+                        .useUTF8CharacterEncoding().writer();
+        final String uniCodeData = "\u00b6\u00bc\u09ab";
+        final String dataSetName = "/aString";
+        final String attributeName = "attr1";
+        final String uniCodeAttributeData = "\u09bb";
+        writer.string().write(dataSetName, uniCodeData);
+        writer.string().setAttr(dataSetName, attributeName, uniCodeAttributeData);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringUnicodeFile);
+        final String dataRead = reader.readString(dataSetName);
+        final String attributeDataRead = reader.string().getAttr(dataSetName, attributeName);
+        assertEquals(uniCodeData, dataRead);
+        assertEquals(uniCodeAttributeData, attributeDataRead);
+        reader.close();
+    }
+
+    @Test
+    public void testStringArrayCompact()
+    {
+        final File stringArrayFile = new File(workingDirectory, "stringArrayCompact.h5");
+        stringArrayFile.delete();
+        assertFalse(stringArrayFile.exists());
+        stringArrayFile.deleteOnExit();
+        IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(stringArrayFile).dontUseExtendableDataTypes()
+                        .writer();
+        final String[] data = new String[]
+            { "abc1234", "ABCxxxX", "xyzUVWX" };
+        final String dataSetName = "/aStringArray";
+        writer.writeStringArray(dataSetName, data);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(stringArrayFile);
+        writer.writeStringArray(dataSetName, new String[]
+            { data[0], data[1] });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(stringArrayFile);
+        String[] dataStored = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(new String[]
+            { data[0], data[1] }, dataStored));
+        reader.close();
+    }
+
+    @Test
+    public void testStringCompression()
+    {
+        final File compressedStringFile = new File(workingDirectory, "compressedStrings.h5");
+        compressedStringFile.delete();
+        assertFalse(compressedStringFile.exists());
+        compressedStringFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(compressedStringFile);
+        final int size = 100000;
+        final String dataSetName = "/hopefullyCompressedString";
+        final String longMonotonousString = StringUtils.repeat("a", size);
+        writer.string().write(dataSetName, longMonotonousString, GENERIC_DEFLATE);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(compressedStringFile);
+        final String longMonotonousStringStored = reader.readString(dataSetName);
+        assertEquals(longMonotonousString, longMonotonousStringStored);
+        reader.close();
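+        // The monotonous string compresses very well: expect at least a 10x reduction on disk.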
+        assertTrue(Long.toString(compressedStringFile.length()),
+                compressedStringFile.length() < size / 10);
+    }
+
+    @Test
+    public void testStringArrayCompression()
+    {
+        final File compressedStringArrayFile =
+                new File(workingDirectory, "compressedStringArray.h5");
+        compressedStringArrayFile.delete();
+        assertFalse(compressedStringArrayFile.exists());
+        compressedStringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(compressedStringArrayFile);
+        final int size = 100000;
+        final String longMonotonousString = StringUtils.repeat("a", size);
+        final String[] data = new String[]
+            { longMonotonousString, longMonotonousString, longMonotonousString };
+        final String dataSetName = "/aHopeFullyCompressedStringArray";
+        writer.string().writeArray(dataSetName, data, size, GENERIC_DEFLATE);
+        writer.close();
+        final IHDF5Reader reader =
+                HDF5FactoryProvider.get().openForReading(compressedStringArrayFile);
+        final String[] dataStored = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(data, dataStored));
+        reader.close();
+        assertTrue(Long.toString(compressedStringArrayFile.length()),
+                compressedStringArrayFile.length() < 3 * size / 10);
+    }
+
+    @Test
+    public void testStringVLArray()
+    {
+        final File compressedStringArrayFile = new File(workingDirectory, "StringVLArray.h5");
+        compressedStringArrayFile.delete();
+        assertFalse(compressedStringArrayFile.exists());
+        compressedStringArrayFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(compressedStringArrayFile);
+        final int size = 100000;
+        final String longMonotonousString = StringUtils.repeat("a", size);
+        final String[] data = new String[]
+            { longMonotonousString, longMonotonousString, longMonotonousString };
+        final String dataSetName = "/aHopeFullyCompressedStringArray";
+        writer.string().writeArrayVL(dataSetName, data);
+        writer.close();
+        final IHDF5Reader reader =
+                HDF5FactoryProvider.get().openForReading(compressedStringArrayFile);
+        final String[] dataStored = reader.readStringArray(dataSetName);
+        assertTrue(Arrays.equals(data, dataStored));
+        assertTrue(reader.getDataSetInformation(dataSetName).getTypeInformation()
+                .isVariableLengthString());
+        reader.close();
+    }
+
+    private void assertMatrixEquals(final float[][] floatMatrixWritten,
+            final float[][] floatMatrixRead)
+    {
+        assertEquals(floatMatrixWritten.length, floatMatrixRead.length);
+        for (int i = 0; i < floatMatrixWritten.length; ++i)
+        {
+            assertEquals(floatMatrixWritten[i].length, floatMatrixRead[i].length);
+            for (int j = 0; j < floatMatrixWritten[i].length; ++j)
+            {
+                assertEquals(i + ":" + j, floatMatrixWritten[i][j], floatMatrixRead[i][j]);
+            }
+        }
+    }
+
+    @Test
+    public void testCompressedDataSet()
+    {
+        final File datasetFile = new File(workingDirectory, "compressed.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String stringDatasetName = "/compressed";
+        final StringBuilder b = new StringBuilder();
+        for (int i = 0; i < 10000; ++i)
+        {
+            b.append("easyToCompress");
+        }
+        writer.int8().writeArray(stringDatasetName, b.toString().getBytes(), INT_DEFLATE);
+        writer.close();
+    }
+
+    @Test
+    public void testCreateEmptyFloatMatrix()
+    {
+        final File datasetFile = new File(workingDirectory, "initiallyEmptyFloatMatrix.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/emptyMatrix";
+        writer.float32().createMatrix(floatDatasetName, 2, 2, FLOAT_CHUNKED);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        float[][] floatMatrixRead = writer.float32().readMatrix(floatDatasetName);
+        assertEquals(0, floatMatrixRead.length);
+
+        // Now write a non-empty matrix.
+        float[][] floatMatrixWritten = new float[][]
+            {
+                { 1f, 2f, 3f },
+                { 4f, 5f, 6f },
+                { 7f, 8f, 9f } };
+        writer.float32().writeMatrix(floatDatasetName, floatMatrixWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        floatMatrixRead = reader.float32().readMatrix(floatDatasetName);
+        assertTrue(equals(floatMatrixWritten, floatMatrixRead));
+        reader.close();
+    }
+
+    @Test
+    public void testFloatVectorLength1()
+    {
+        final File datasetFile = new File(workingDirectory, "singleFloatVector.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/singleFloat";
+        final float[] floatDataWritten = new float[]
+            { 1.0f };
+        writer.float32().writeArray(floatDatasetName, floatDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
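+        // Return value deliberately ignored; presumably this merely checks that probing for a
+        // non-existent attribute does not throw.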
+        reader.object().hasAttribute(floatDatasetName, "flag");
+        final float[] floatDataRead = reader.float32().readArray(floatDatasetName);
+        assertTrue(Arrays.equals(floatDataWritten, floatDataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testFloatMatrixLength1()
+    {
+        final File datasetFile = new File(workingDirectory, "singleFloatMatrix.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/singleFloat";
+        final float[][] floatDataWritten = new float[][]
+            {
+                { 1.0f } };
+        writer.float32().writeMatrix(floatDatasetName, floatDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[][] floatDataRead = reader.float32().readMatrix(floatDatasetName);
+        assertTrue(equals(floatDataWritten, floatDataRead));
+        reader.close();
+    }
+
+    @Test
+    public void testOneRowFloatMatrix()
+    {
+        final File datasetFile = new File(workingDirectory, "oneRowFloatMatrix.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/singleFloat";
+        final float[][] floatDataWritten = new float[][]
+            {
+                { 1.0f, 2.0f } };
+        writer.float32().writeMatrix(floatDatasetName, floatDataWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[][] floatDataRead = reader.float32().readMatrix(floatDatasetName);
+        assertTrue(equals(floatDataWritten, floatDataRead));
+        reader.close();
+    }
+
+    private static boolean equals(float[][] a, float[][] a2)
+    {
+        if (a == a2)
+        {
+            return true;
+        }
+        if (a == null || a2 == null)
+        {
+            return false;
+        }
+
+        int rows = a.length;
+        if (a2.length != rows)
+        {
+            return false;
+        }
+
+        for (int i = 0; i < rows; i++)
+        {
+            int columns = a[i].length;
+            if (a2[i].length != columns)
+            {
+                return false;
+            }
+            for (int j = 0; j < columns; j++)
+            {
+                if (Float.floatToIntBits(a[i][j]) != Float.floatToIntBits(a2[i][j]))
+                {
+                    return false;
+                }
+            }
+        }
+
+        return true;
+    }
+
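+    // Zero-length arrays of every primitive width should round-trip as real
+    // (zero-element) datasets; the object-type assertion below confirms that
+    // writing an empty array still creates a DATASET object.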
+    @Test
+    public void testEmptyVectorDataSets()
+    {
+        final File datasetFile = new File(workingDirectory, "emptyVectorDatasets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/float";
+        writer.float32().writeArray(floatDatasetName, new float[0]);
+        final String doubleDatasetName = "/double";
+        writer.float64().writeArray(doubleDatasetName, new double[0]);
+        final String byteDatasetName = "byte";
+        writer.int8().writeArray(byteDatasetName, new byte[0]);
+        final String shortDatasetName = "/short";
+        writer.int16().writeArray(shortDatasetName, new short[0]);
+        final String intDatasetName = "/int";
+        writer.int32().writeArray(intDatasetName, new int[0]);
+        final String longDatasetName = "/long";
+        writer.int64().writeArray(longDatasetName, new long[0]);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(HDF5ObjectType.DATASET, reader.object().getObjectType(floatDatasetName));
+        assertTrue(reader.float32().readArray(floatDatasetName).length == 0);
+        assertTrue(reader.float64().readArray(doubleDatasetName).length == 0);
+        assertTrue(reader.int8().readArray(byteDatasetName).length == 0);
+        assertTrue(reader.int16().readArray(shortDatasetName).length == 0);
+        assertTrue(reader.int32().readArray(intDatasetName).length == 0);
+        assertTrue(reader.int64().readArray(longDatasetName).length == 0);
+        reader.close();
+    }
+
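+    // Same round-trip as above, but with dontUseExtendableDataTypes(), which
+    // (per the test name) is expected to store the empty datasets with a
+    // contiguous rather than chunked layout.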
+    @Test
+    public void testEmptyVectorDataSetsContiguous()
+    {
+        final File datasetFile = new File(workingDirectory, "emptyVectorDatasetsContiguous.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(datasetFile).dontUseExtendableDataTypes()
+                        .writer();
+        final String floatDatasetName = "/float";
+        writer.float32().writeArray(floatDatasetName, new float[0]);
+        final String doubleDatasetName = "/double";
+        writer.float64().writeArray(doubleDatasetName, new double[0]);
+        final String byteDatasetName = "byte";
+        writer.int8().writeArray(byteDatasetName, new byte[0]);
+        final String shortDatasetName = "/short";
+        writer.int16().writeArray(shortDatasetName, new short[0]);
+        final String intDatasetName = "/int";
+        writer.int32().writeArray(intDatasetName, new int[0]);
+        final String longDatasetName = "/long";
+        writer.int64().writeArray(longDatasetName, new long[0]);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(HDF5ObjectType.DATASET, reader.object().getObjectType(floatDatasetName));
+        assertTrue(reader.float32().readArray(floatDatasetName).length == 0);
+        assertTrue(reader.float64().readArray(doubleDatasetName).length == 0);
+        assertTrue(reader.int8().readArray(byteDatasetName).length == 0);
+        assertTrue(reader.int16().readArray(shortDatasetName).length == 0);
+        assertTrue(reader.int32().readArray(intDatasetName).length == 0);
+        assertTrue(reader.int64().readArray(longDatasetName).length == 0);
+        reader.close();
+    }
+
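+    // Same round-trip again, this time requesting a COMPACT storage layout
+    // explicitly via the *_COMPACT storage features.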
+    @Test
+    public void testEmptyVectorDataSetsCompact()
+    {
+        final File datasetFile = new File(workingDirectory, "emptyVectorDatasetsCompact.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/float";
+        writer.float32().writeArray(floatDatasetName, new float[0],
+                HDF5FloatStorageFeatures.FLOAT_COMPACT);
+        final String doubleDatasetName = "/double";
+        writer.float64().writeArray(doubleDatasetName, new double[0],
+                HDF5FloatStorageFeatures.FLOAT_COMPACT);
+        final String byteDatasetName = "byte";
+        writer.int8().writeArray(byteDatasetName, new byte[0], HDF5IntStorageFeatures.INT_COMPACT);
+        final String shortDatasetName = "/short";
+        writer.int16().writeArray(shortDatasetName, new short[0],
+                HDF5IntStorageFeatures.INT_COMPACT);
+        final String intDatasetName = "/int";
+        writer.int32().writeArray(intDatasetName, new int[0], HDF5IntStorageFeatures.INT_COMPACT);
+        final String longDatasetName = "/long";
+        writer.int64().writeArray(longDatasetName, new long[0], HDF5IntStorageFeatures.INT_COMPACT);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(HDF5ObjectType.DATASET, reader.object().getObjectType(floatDatasetName));
+        assertTrue(reader.float32().readArray(floatDatasetName).length == 0);
+        assertTrue(reader.float64().readArray(doubleDatasetName).length == 0);
+        assertTrue(reader.int8().readArray(byteDatasetName).length == 0);
+        assertTrue(reader.int16().readArray(shortDatasetName).length == 0);
+        assertTrue(reader.int32().readArray(intDatasetName).length == 0);
+        assertTrue(reader.int64().readArray(longDatasetName).length == 0);
+        reader.close();
+    }
+
+    @Test
+    public void testEmptyMatrixDataSets()
+    {
+        final File datasetFile = new File(workingDirectory, "emptyMatrixDatasets.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String floatDatasetName = "/float";
+        writer.float32().writeMatrix(floatDatasetName, new float[0][0]);
+        final String doubleDatasetName = "/double";
+        writer.float64().writeMatrix(doubleDatasetName, new double[1][0]);
+        final String byteDatasetName = "byte";
+        writer.int8().writeMatrix(byteDatasetName, new byte[2][0]);
+        final String shortDatasetName = "/short";
+        writer.int16().writeMatrix(shortDatasetName, new short[3][0]);
+        final String intDatasetName = "/int";
+        writer.int32().writeMatrix(intDatasetName, new int[4][0]);
+        final String longDatasetName = "/long";
+        writer.int64().writeMatrix(longDatasetName, new long[5][0]);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(isEmpty(reader.float32().readMatrix(floatDatasetName)));
+        assertTrue(isEmpty(reader.float64().readMatrix(doubleDatasetName)));
+        assertTrue(isEmpty(reader.int8().readMatrix(byteDatasetName)));
+        assertTrue(isEmpty(reader.int16().readMatrix(shortDatasetName)));
+        assertTrue(isEmpty(reader.int32().readMatrix(intDatasetName)));
+        assertTrue(isEmpty(reader.int64().readMatrix(longDatasetName)));
+        reader.close();
+    }
+
+    @Test
+    public void testEmptyMatrixDataSetsContiguous()
+    {
+        final File datasetFile = new File(workingDirectory, "emptyMatrixDatasetsContiguous.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(datasetFile).dontUseExtendableDataTypes()
+                        .writer();
+        final String floatDatasetName = "/float";
+        writer.float32().writeMatrix(floatDatasetName, new float[0][0]);
+        final String doubleDatasetName = "/double";
+        writer.float64().writeMatrix(doubleDatasetName, new double[1][0]);
+        final String byteDatasetName = "byte";
+        writer.int8().writeMatrix(byteDatasetName, new byte[2][0]);
+        final String shortDatasetName = "/short";
+        writer.int16().writeMatrix(shortDatasetName, new short[3][0]);
+        final String intDatasetName = "/int";
+        writer.int32().writeMatrix(intDatasetName, new int[4][0]);
+        final String longDatasetName = "/long";
+        writer.int64().writeMatrix(longDatasetName, new long[5][0]);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertTrue(isEmpty(reader.float32().readMatrix(floatDatasetName)));
+        assertTrue(isEmpty(reader.float64().readMatrix(doubleDatasetName)));
+        assertTrue(isEmpty(reader.int8().readMatrix(byteDatasetName)));
+        assertTrue(isEmpty(reader.int16().readMatrix(shortDatasetName)));
+        assertTrue(isEmpty(reader.int32().readMatrix(intDatasetName)));
+        assertTrue(isEmpty(reader.int64().readMatrix(longDatasetName)));
+        reader.close();
+    }
+
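+    // Overwriting a dataset with a longer array after reopening the file
+    // should grow the dataset in place; the read-back must see the new size.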
+    @Test
+    public void testOverwriteVectorIncreaseSize()
+    {
+        final File datasetFile = new File(workingDirectory, "resizableVector.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/vector";
+        final float[] firstVector = new float[]
+            { 1f, 2f, 3f };
+        writer.float32().writeArray(dsName, firstVector);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        final float[] secondVector = new float[]
+            { 1f, 2f, 3f, 4f };
+        writer.float32().writeArray(dsName, secondVector);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[] vectorRead = reader.float32().readArray(dsName);
+        reader.close();
+        assertTrue(Arrays.equals(secondVector, vectorRead));
+    }
+
+    @Test
+    public void testOverwriteWithEmptyVector()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteVector1.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/vector";
+        final byte[] firstVector = new byte[]
+            { 1, 2, 3 };
+        writer.int8().writeArray(dsName, firstVector);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        final byte[] emptyVector = new byte[0];
+        writer.int8().writeArray(dsName, emptyVector);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] vectorRead = reader.int8().readArray(dsName);
+        reader.close();
+        assertTrue(Arrays.equals(emptyVector, vectorRead));
+    }
+
+    @Test
+    public void testOverwriteEmptyVectorWithNonEmptyVector()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteVector2.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/vector";
+        final byte[] emptyVector = new byte[0];
+        writer.int8().writeArray(dsName, emptyVector);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        final byte[] nonEmptyVector = new byte[]
+            { 1 };
+        writer.int8().writeArray(dsName, nonEmptyVector);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final byte[] vectorRead = reader.int8().readArray(dsName);
+        reader.close();
+        assertTrue(Arrays.equals(nonEmptyVector, vectorRead));
+    }
+
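+    // Deletes the dataset by its relative path ("vector"); afterwards only
+    // the internal data-type group should remain under "/".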
+    @Test
+    public void testDeleteVector()
+    {
+        final File datasetFile = new File(workingDirectory, "deleteVector.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        try
+        {
+            final String dsName = "/vector";
+            final byte[] firstVector = new byte[]
+                { 1, 2, 3 };
+            writer.int8().writeArray(dsName, firstVector);
+            writer.close();
+            writer = HDF5FactoryProvider.get().open(datasetFile);
+            writer.delete(dsName.substring(1));
+        } finally
+        {
+            writer.close();
+        }
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        try
+        {
+            final List<String> members = reader.object().getAllGroupMembers("/");
+            assertEquals(1, members.size());
+            assertEquals(HDF5Utils.getDataTypeGroup("").substring(1), members.get(0));
+        } finally
+        {
+            reader.close();
+        }
+    }
+
+    @Test
+    public void testDeleteGroup()
+    {
+        final File datasetFile = new File(workingDirectory, "deleteGroup.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        try
+        {
+            final String groupName = "/group";
+            final String dsName = groupName + "/vector";
+            final byte[] firstVector = new byte[]
+                { 1, 2, 3 };
+            writer.int8().writeArray(dsName, firstVector);
+            writer.close();
+            writer = HDF5FactoryProvider.get().open(datasetFile);
+            writer.delete(groupName);
+        } finally
+        {
+            writer.close();
+        }
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        try
+        {
+            final List<String> members = reader.object().getAllGroupMembers("/");
+            assertEquals(1, members.size());
+            assertEquals(HDF5Utils.getDataTypeGroup("").substring(1), members.get(0));
+            assertEquals(0, reader.getGroupMembers("/").size());
+        } finally
+        {
+            reader.close();
+        }
+    }
+
+    @Test
+    public void testRenameLink()
+    {
+        final File file = new File(workingDirectory, "renameLink.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.writeBoolean("/some/boolean/value", true);
+        writer.object().move("/some/boolean/value", "/a/new/home");
+        assertFalse(writer.exists("/some/boolean/value"));
+        assertTrue(writer.exists("/a/new/home"));
+        writer.close();
+    }
+
+    @Test(expectedExceptions = HDF5SymbolTableException.class)
+    public void testRenameLinkOverwriteFails()
+    {
+        final File file = new File(workingDirectory, "renameLinkOverwriteFails.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.writeBoolean("/some/boolean/value", true);
+        writer.int32().write("/a/new/home", 4);
+        writer.object().move("/some/boolean/value", "/a/new/home");
+        writer.close();
+    }
+
+    @Test(expectedExceptions = HDF5SymbolTableException.class)
+    public void testRenameLinkSrcNonExistentFails()
+    {
+        final File file = new File(workingDirectory, "renameLinkSrcNonExistentFails.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.object().move("/some/boolean/value", "/a/new/home");
+        writer.close();
+    }
+
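+    // The next three tests exercise keepDataSetsIfTheyExist(): the existing
+    // fixed-length string dataset is reused, so shorter values fit and a
+    // longer value is truncated to the original length (see the assertions).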
+    @Test
+    public void testOverwriteKeepWithEmptyString()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteWithEmtpyString.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/string";
+        writer.string().write(dsName, "non-empty");
+        writer.close();
+        writer =
+                HDF5FactoryProvider.get().configure(datasetFile).keepDataSetsIfTheyExist().writer();
+        writer.string().write(dsName, "");
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final String stringRead = reader.readString(dsName);
+        reader.close();
+        assertEquals("", stringRead);
+    }
+
+    @Test
+    public void testOverwriteKeepWithShorterString()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteWithShorterString.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/string";
+        writer.string().write(dsName, "non-empty");
+        writer.close();
+        writer =
+                HDF5FactoryProvider.get().configure(datasetFile).keepDataSetsIfTheyExist().writer();
+        writer.string().write(dsName, "non");
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final String stringRead = reader.readString(dsName);
+        reader.close();
+        assertEquals("non", stringRead);
+    }
+
+    @Test
+    public void testOverwriteKeepWithLongerString()
+    {
+        final File datasetFile = new File(workingDirectory, "overwriteWithLongerString.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/string";
+        writer.string().write(dsName, "non-empty");
+        writer.close();
+        writer =
+                HDF5FactoryProvider.get().configure(datasetFile).keepDataSetsIfTheyExist().writer();
+        writer.string().write(dsName, "0123456789");
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final String stringRead = reader.readString(dsName);
+        reader.close();
+        assertEquals("012345678", stringRead);
+    }
+
+    @Test
+    public void testReplaceWithLongerString()
+    {
+        final File datasetFile = new File(workingDirectory, "replaceWithLongerString.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/string";
+        writer.string().write(dsName, "non-empty");
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.string().write(dsName, "0123456789");
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final String stringRead = reader.readString(dsName);
+        reader.close();
+        assertEquals("0123456789", stringRead);
+    }
+
+    @Test
+    public void testOverwriteMatrixIncreaseSize()
+    {
+        final File datasetFile = new File(workingDirectory, "resizableMatrix.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/matrix";
+        final float[][] firstMatrix = new float[][]
+            {
+                { 1f, 2f, 3f },
+                { 4f, 5f, 6f } };
+        writer.float32().writeMatrix(dsName, firstMatrix);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        final float[][] secondMatrix = new float[][]
+            {
+                { 1f, 2f, 3f, 4f },
+                { 5f, 6f, 7f, 8f },
+                { 9f, 10f, 11f, 12f } };
+        writer.float32().writeMatrix(dsName, secondMatrix);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final float[][] matrixRead = reader.float32().readMatrix(dsName);
+        reader.close();
+        assertMatrixEquals(secondMatrix, matrixRead);
+    }
+
+    @Test
+    public void testOverwriteStringVectorDecreaseSize()
+    {
+        final File datasetFile = new File(workingDirectory, "resizableStringVector.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/vector";
+        final String[] firstVector = new String[]
+            { "a", "b", "c" };
+        writer.writeStringArray(dsName, firstVector);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String[] secondVector = new String[]
+            { "a", "b" };
+        writer.writeStringArray(dsName, secondVector);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final String[] vectorRead = reader.readStringArray(dsName);
+        reader.close();
+        assertTrue(Arrays.equals(secondVector, vectorRead));
+    }
+
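+    // Walks a (possibly multi-dimensional) array via reflection: the matrix
+    // counts as empty as soon as any dimension along the first-element chain
+    // has length 0.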
+    private static boolean isEmpty(Object matrix)
+    {
+        Object maybeAnArray = matrix;
+        do
+        {
+            if (Array.getLength(maybeAnArray) == 0)
+            {
+                return true;
+            }
+            maybeAnArray = Array.get(maybeAnArray, 0);
+        } while (maybeAnArray.getClass().isArray());
+        return false;
+    }
+
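+    // readArrayRaw() returns strings padded with '\0' to the fixed element
+    // length of the dataset (3 here) instead of clipping at the first NUL.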
+    @Test
+    public void testStringArrayWithNullStrings()
+    {
+        final File datasetFile = new File(workingDirectory, "stringArrayWithNullStrings.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/vector";
+        final String[] array = new String[]
+            { "a\0c", "b", "" };
+        writer.writeStringArray(dsName, array);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final String[] arrayRead = reader.string().readArrayRaw(dsName);
+        reader.close();
+        assertEquals(array.length, arrayRead.length);
+        assertEquals("a\0c", arrayRead[0]);
+        assertEquals(StringUtils.rightPad("b", 3, '\0'), arrayRead[1]);
+        assertEquals(StringUtils.rightPad("", 3, '\0'), arrayRead[2]);
+    }
+
+    @Test
+    public void testStringMDArrayWithNullStrings()
+    {
+        final File datasetFile = new File(workingDirectory, "stringMDArrayWithNullStrings.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "/vector";
+        final String[] array = new String[]
+            { "a\0c", "b", "", "\000123456" };
+        final MDArray<String> mdArray = new MDArray<String>(array, new int[]
+            { 2, 2 });
+        writer.string().writeMDArray(dsName, mdArray);
+        writer.close();
+        IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final MDArray<String> arrayRead = reader.string().readMDArrayRaw(dsName);
+        assertTrue(Arrays.equals(mdArray.dimensions(), arrayRead.dimensions()));
+        assertEquals(StringUtils.rightPad(mdArray.get(0, 0), 7, '\0'), arrayRead.get(0, 0));
+        assertEquals(StringUtils.rightPad(mdArray.get(0, 1), 7, '\0'), arrayRead.get(0, 1));
+        assertEquals(StringUtils.rightPad(mdArray.get(1, 0), 7, '\0'), arrayRead.get(1, 0));
+        assertEquals(StringUtils.rightPad(mdArray.get(1, 1), 7, '\0'), arrayRead.get(1, 1));
+        assertEquals(2, reader.object().getRank(dsName));
+        assertTrue(Arrays.equals(new long[]
+            { 2, 2 }, reader.object().getDimensions(dsName)));
+        reader.close();
+    }
+
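+    // time().write() tags the dataset with the timestamp type variant;
+    // reading an untagged long dataset via readTimeStamp() is expected
+    // to throw an HDF5JavaException.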
+    @Test
+    public void testTimestamps()
+    {
+        final File datasetFile = new File(workingDirectory, "timestamps.h5");
+        final String timeStampDS = "prehistoric";
+        final long timestampValue = 10000L;
+        final String noTimestampDS = "notatimestamp";
+        final long someLong = 173756123L;
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.time().write(timeStampDS, timestampValue);
+        writer.int64().write(noTimestampDS, someLong);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH, reader
+                .object().tryGetTypeVariant(timeStampDS));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeStampDS);
+        assertTrue(info.isScalar());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5DataClass.INTEGER, info.getTypeInformation().getDataClass());
+        assertTrue(info.isTimeStamp());
+        assertFalse(info.isTimeDuration());
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                info.tryGetTypeVariant());
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH, info
+                .getTypeInformation().tryGetTypeVariant());
+        assertEquals(timestampValue, reader.time().readTimeStamp(timeStampDS));
+        assertEquals(timestampValue, reader.time().readDate(timeStampDS).getTime());
+        try
+        {
+            reader.time().readTimeStamp(noTimestampDS);
+            fail("Failed to detect non-timestamp value.");
+        } catch (HDF5JavaException ex)
+        {
+            if (ex.getMessage().contains("not a time stamp") == false)
+            {
+                throw ex;
+            }
+            // That is what we expect.
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testTimestampArray()
+    {
+        final File datasetFile = new File(workingDirectory, "timestampArray.h5");
+        final String timeSeriesDS = "/some/timeseries";
+        final long[] timeSeries = new long[10];
+        for (int i = 0; i < timeSeries.length; ++i)
+        {
+            timeSeries[i] = i * 10000L;
+        }
+        final long[] notATimeSeries = new long[100];
+        final String noTimeseriesDS = "nota/timeseries";
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.time().writeArray(timeSeriesDS, timeSeries);
+        writer.int64().writeArray(noTimeseriesDS, notATimeSeries);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeSeriesDS);
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                info.tryGetTypeVariant());
+        assertChunkSizes(info, 10);
+        assertTrue(Arrays.equals(timeSeries, reader.time().readTimeStampArray(timeSeriesDS)));
+        final Date[] datesRead = reader.readDateArray(timeSeriesDS);
+        final long[] timeStampsRead = new long[datesRead.length];
+        for (int i = 0; i < timeStampsRead.length; ++i)
+        {
+            timeStampsRead[i] = datesRead[i].getTime();
+        }
+        assertTrue(Arrays.equals(timeSeries, timeStampsRead));
+        try
+        {
+            reader.time().readTimeStampArray(noTimeseriesDS);
+            fail("Failed to detect non-timestamp array.");
+        } catch (HDF5JavaException ex)
+        {
+            if (ex.getMessage().contains("not a time stamp") == false)
+            {
+                throw ex;
+            }
+            // That is what we expect.
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testTimestampArrayChunked()
+    {
+        final File datasetFile = new File(workingDirectory, "timestampArrayChunked.h5");
+        final String timeSeriesDS = "/some/timeseries";
+        final long[] timeSeries = new long[10];
+        for (int i = 0; i < timeSeries.length; ++i)
+        {
+            timeSeries[i] = i * 10000L;
+        }
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.time().createArray(timeSeriesDS, 0, 10, GENERIC_DEFLATE);
+        for (int i = 0; i < 10; ++i)
+        {
+            writer.time().writeArrayBlock(timeSeriesDS, timeSeries, i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeSeriesDS);
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                info.tryGetTypeVariant());
+        assertChunkSizes(info, 10);
+        for (int i = 0; i < 10; ++i)
+        {
+            assertTrue(Arrays.equals(timeSeries,
+                    reader.time().readTimeStampArrayBlock(timeSeriesDS, 10, i)));
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testTimeDurations()
+    {
+        final File datasetFile = new File(workingDirectory, "timedurations.h5");
+        final String timeDurationDS = "someDuration";
+        final String timeDurationDS2 = "someOtherDuration";
+        final long timeDurationInSeconds = 10000L;
+        final long timeDurationInMilliSeconds = 10000L * 1000L;
+        final long timeDurationInHoursRounded = 3L;
+        final String noTimestampDS = "notatimeduration";
+        final long someLong = 173756123L;
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.duration().write(timeDurationDS, timeDurationInSeconds, HDF5TimeUnit.SECONDS);
+        final HDF5TimeDuration timeDurationWithUnit =
+                new HDF5TimeDuration(timeDurationInHoursRounded, HDF5TimeUnit.HOURS);
+        writer.writeTimeDuration(timeDurationDS2, timeDurationWithUnit);
+        writer.int64().write(noTimestampDS, someLong);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeDurationDS);
+        assertTrue(info.isScalar());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5DataClass.INTEGER, info.getTypeInformation().getDataClass());
+        assertTrue(info.isTimeDuration());
+        assertFalse(info.isTimeStamp());
+        assertEquals(HDF5TimeUnit.SECONDS, reader.duration().tryGetTimeUnit(timeDurationDS));
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_SECONDS, info.tryGetTypeVariant());
+        assertEquals(HDF5TimeUnit.SECONDS, info.tryGetTimeUnit());
+        assertEquals(timeDurationInSeconds,
+                HDF5TimeUnit.SECONDS.convert(reader.readTimeDuration(timeDurationDS)));
+        assertEquals(timeDurationInMilliSeconds,
+                HDF5TimeUnit.MILLISECONDS.convert(reader.readTimeDuration(timeDurationDS)));
+        assertEquals(timeDurationInHoursRounded,
+                HDF5TimeUnit.HOURS.convert(reader.readTimeDuration(timeDurationDS)));
+        assertEquals(new HDF5TimeDuration(timeDurationInSeconds, HDF5TimeUnit.SECONDS),
+                reader.readTimeDuration(timeDurationDS));
+        assertEquals(timeDurationWithUnit, reader.readTimeDuration(timeDurationDS2));
+        try
+        {
+            reader.readTimeDuration(noTimestampDS);
+            fail("Failed to detect non-timeduration value.");
+        } catch (HDF5JavaException ex)
+        {
+            if (ex.getMessage().contains("not a time duration") == false)
+            {
+                throw ex;
+            }
+            // That is what we expect.
+        }
+        reader.close();
+    }
+
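+    // The type variant can also be attached manually via setTypeVariant();
+    // here a plain int16 scalar becomes readable as a time duration in
+    // seconds.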
+    @Test
+    public void testSmallTimeDurations()
+    {
+        final File datasetFile = new File(workingDirectory, "smalltimedurations.h5");
+        final String timeDurationDS = "someDuration";
+        final short timeDurationInSeconds = 10000;
+        final long timeDurationInMilliSeconds = 10000L * 1000L;
+        final long timeDurationInHoursRounded = 3L;
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.int16().write(timeDurationDS, timeDurationInSeconds);
+        writer.object().setTypeVariant(timeDurationDS, HDF5TimeUnit.SECONDS.getTypeVariant());
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeDurationDS);
+        assertTrue(info.isScalar());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        assertNull(info.tryGetChunkSizes());
+        assertEquals(HDF5DataClass.INTEGER, info.getTypeInformation().getDataClass());
+        assertEquals(NativeData.SHORT_SIZE, info.getTypeInformation().getElementSize());
+        assertTrue(info.isTimeDuration());
+        assertFalse(info.isTimeStamp());
+        assertEquals(HDF5TimeUnit.SECONDS, reader.duration().tryGetTimeUnit(timeDurationDS));
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_SECONDS, info.tryGetTypeVariant());
+        assertEquals(HDF5TimeUnit.SECONDS, info.tryGetTimeUnit());
+        assertEquals(timeDurationInSeconds,
+                HDF5TimeUnit.SECONDS.convert(reader.duration().read(timeDurationDS)));
+        assertEquals(timeDurationInMilliSeconds,
+                HDF5TimeUnit.MILLISECONDS.convert(reader.duration().read(timeDurationDS)));
+        assertEquals(timeDurationInHoursRounded,
+                HDF5TimeUnit.HOURS.convert(reader.readTimeDuration(timeDurationDS)));
+        reader.close();
+    }
+
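+    // HDF5TimeDurationArray.create() normalizes mixed units to the smallest
+    // unit occurring (seconds here): 2 s, 5 h and 1 d are stored as
+    // { 2, 18000, 86400 } seconds, as the readMDArray() assertion expects.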
+    @Test
+    public void testTimeDurationArray()
+    {
+        final File datasetFile = new File(workingDirectory, "timedurationarray.h5");
+        final String timeDurationDS = "someDuration";
+        final HDF5TimeDuration[] durationsWritten =
+                new HDF5TimeDuration[]
+                    { new HDF5TimeDuration(2, HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(5, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(1, HDF5TimeUnit.DAYS) };
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.writeTimeDurationArray(timeDurationDS,
+                HDF5TimeDurationArray.create(durationsWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeDurationDS);
+        assertTrue(info.isTimeDuration());
+        assertFalse(info.isTimeStamp());
+        assertEquals(HDF5TimeUnit.SECONDS, info.tryGetTimeUnit());
+        final HDF5TimeDurationArray durationsRead = reader.readTimeDurationArray(timeDurationDS);
+        assertEquals(durationsWritten.length, durationsRead.getLength());
+        for (int i = 0; i < durationsWritten.length; ++i)
+        {
+            assertTrue(durationsRead.get(i).isEquivalent(durationsWritten[i]));
+        }
+        assertEquals(new HDF5TimeDurationMDArray(new long[]
+            { 2, 18000, 86400 }, new int[]
+            { 3 }, HDF5TimeUnit.SECONDS), reader.duration().readMDArray(timeDurationDS));
+        reader.close();
+    }
+
+    @Test
+    public void testTimeDurationMDArray()
+    {
+        final File datasetFile = new File(workingDirectory, "timedurationarray.h5");
+        final String timeDurationDS = "someDuration";
+        final HDF5TimeDurationMDArray durationsWritten =
+                new HDF5TimeDurationMDArray(new HDF5TimeDuration[]
+                    { new HDF5TimeDuration(2, HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(4, HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(8, HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(16, HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(1, HDF5TimeUnit.MINUTES),
+                            new HDF5TimeDuration(17, HDF5TimeUnit.MINUTES),
+                            new HDF5TimeDuration(42, HDF5TimeUnit.MINUTES),
+                            new HDF5TimeDuration(111, HDF5TimeUnit.MINUTES),
+                            new HDF5TimeDuration(5, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(10, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(20, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(40, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(1, HDF5TimeUnit.DAYS),
+                            new HDF5TimeDuration(2, HDF5TimeUnit.DAYS),
+                            new HDF5TimeDuration(4, HDF5TimeUnit.DAYS),
+                            new HDF5TimeDuration(8, HDF5TimeUnit.DAYS), }, new int[]
+                    { 4, 4 });
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.duration().writeMDArray(timeDurationDS, durationsWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeDurationDS);
+        assertTrue(info.isTimeDuration());
+        assertFalse(info.isTimeStamp());
+        assertEquals(HDF5TimeUnit.SECONDS, info.tryGetTimeUnit());
+        final HDF5TimeDurationMDArray durationsRead = reader.duration().readMDArray(timeDurationDS);
+        assertEquals(durationsWritten, durationsRead);
+        assertEquals(new HDF5TimeDurationMDArray(new long[]
+            { 2, 4, 8, 16 }, new int[]
+            { 1, 4 }, HDF5TimeUnit.SECONDS),
+                reader.duration().readMDArrayBlock(timeDurationDS, new int[]
+                    { 1, 4 }, new long[]
+                    { 0, 0 }));
+        assertEquals(
+                new HDF5TimeDurationMDArray(new long[]
+                    { 1, 17, 42, 111 }, new int[]
+                    { 1, 4 }, HDF5TimeUnit.MINUTES),
+                HDF5TimeUnit.MINUTES.convert(reader.duration().readMDArrayBlock(timeDurationDS,
+                        new int[]
+                            { 1, 4 }, new long[]
+                            { 1, 0 })));
+        assertEquals(
+                new HDF5TimeDurationMDArray(new long[]
+                    { 5, 10, 20, 40 }, new int[]
+                    { 1, 4 }, HDF5TimeUnit.HOURS),
+                HDF5TimeUnit.HOURS.convert(reader.duration().readMDArrayBlock(timeDurationDS,
+                        new int[]
+                            { 1, 4 }, new long[]
+                            { 2, 0 })));
+        assertEquals(
+                new HDF5TimeDurationMDArray(new long[]
+                    { 1, 2, 4, 8 }, new int[]
+                    { 1, 4 }, HDF5TimeUnit.DAYS),
+                HDF5TimeUnit.DAYS.convert(reader.duration().readMDArrayBlock(timeDurationDS,
+                        new int[]
+                            { 1, 4 }, new long[]
+                            { 3, 0 })));
+        reader.close();
+    }
+
+    @Test
+    public void testTimeDurationArrayChunked()
+    {
+        final File datasetFile = new File(workingDirectory, "timeDurationArrayChunked.h5");
+        final String timeDurationSeriesDS = "/some/timeseries";
+        final String timeDurationSeriesDS2 = "/some/timeseries2";
+        final long[] timeDurationSeriesMillis = new long[10];
+        final long[] timeDurationSeriesMicros = new long[10];
+        for (int i = 0; i < timeDurationSeriesMillis.length; ++i)
+        {
+            timeDurationSeriesMillis[i] = i * 10000L;
+            timeDurationSeriesMicros[i] = timeDurationSeriesMillis[i] * 1000L;
+        }
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        writer.duration().createArray(timeDurationSeriesDS, 100, 10, HDF5TimeUnit.MILLISECONDS,
+                GENERIC_DEFLATE);
+        for (int i = 0; i < 10; ++i)
+        {
+            writer.duration().writeArrayBlock(timeDurationSeriesDS,
+                    new HDF5TimeDurationArray(timeDurationSeriesMicros, HDF5TimeUnit.MICROSECONDS),
+                    i);
+        }
+        writer.duration().createArray(timeDurationSeriesDS2, 100, 10, HDF5TimeUnit.SECONDS,
+                GENERIC_DEFLATE);
+        final HDF5TimeDuration[] timeDurationSeries =
+                new HDF5TimeDuration[]
+                    {
+                            new HDF5TimeDuration(timeDurationSeriesMicros[0],
+                                    HDF5TimeUnit.MICROSECONDS),
+                            new HDF5TimeDuration(timeDurationSeriesMicros[1],
+                                    HDF5TimeUnit.MICROSECONDS),
+                            new HDF5TimeDuration(timeDurationSeriesMillis[2],
+                                    HDF5TimeUnit.MILLISECONDS),
+                            new HDF5TimeDuration(timeDurationSeriesMillis[3],
+                                    HDF5TimeUnit.MILLISECONDS),
+                            new HDF5TimeDuration(timeDurationSeriesMillis[4] / 1000L,
+                                    HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(timeDurationSeriesMillis[5] / 1000L,
+                                    HDF5TimeUnit.SECONDS),
+                            new HDF5TimeDuration(6, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(7, HDF5TimeUnit.HOURS),
+                            new HDF5TimeDuration(8, HDF5TimeUnit.DAYS),
+                            new HDF5TimeDuration(9, HDF5TimeUnit.DAYS) };
+        for (int i = 0; i < 10; ++i)
+        {
+            writer.duration().writeArrayBlock(timeDurationSeriesDS2,
+                    HDF5TimeDurationArray.create(timeDurationSeries), i);
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        final HDF5DataSetInformation info = reader.getDataSetInformation(timeDurationSeriesDS);
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_MILLISECONDS, info.tryGetTypeVariant());
+        assertChunkSizes(info, 10);
+        for (int i = 0; i < 10; ++i)
+        {
+            assertTrue(Arrays.equals(
+                    timeDurationSeriesMicros,
+                    HDF5TimeUnit.MICROSECONDS.convert(reader.duration().readArrayBlock(
+                            timeDurationSeriesDS, 10, i))));
+        }
+        final HDF5DataSetInformation info2 = reader.getDataSetInformation(timeDurationSeriesDS2);
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_SECONDS, info2.tryGetTypeVariant());
+        assertChunkSizes(info2, 10);
+        for (int i = 0; i < 10; ++i)
+        {
+            final long[] block =
+                    HDF5TimeUnit.MICROSECONDS.convert(reader.duration().readArrayBlock(
+                            timeDurationSeriesDS2, 10, i));
+            for (int j = 0; j < block.length; ++j)
+            {
+                assertEquals(HDF5TimeUnit.MICROSECONDS.convert(timeDurationSeries[j]), block[j]);
+            }
+        }
+        for (int i = 0; i < 10; ++i)
+        {
+            final HDF5TimeDurationArray block =
+                    reader.duration().readArrayBlock(timeDurationSeriesDS2, 10, i);
+            for (int j = 0; j < block.getLength(); ++j)
+            {
+                assertTrue(block.get(j).isEquivalent(timeDurationSeries[j]));
+            }
+        }
+        for (HDF5DataBlock<HDF5TimeDurationArray> block : reader.duration().getArrayNaturalBlocks(
+                timeDurationSeriesDS2))
+        {
+            final HDF5TimeDurationArray data = block.getData();
+            for (int j = 0; j < data.getLength(); ++j)
+            {
+                assertTrue(data.get(j) + "<>" + timeDurationSeries[j],
+                        data.get(j).isEquivalent(timeDurationSeries[j]));
+            }
+        }
+        reader.close();
+    }
+
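+    // Broad attribute coverage on a single dataset: boolean, integer, byte,
+    // unsigned byte, fixed and variable-length strings (the VL attribute is
+    // set twice and must keep the second value), arrays, MD arrays, enums,
+    // and attribute deletion.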
+    @Test
+    public void testAttributes()
+    {
+        final File attributeFile = new File(workingDirectory, "attributes.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(attributeFile);
+        final String datasetName = "SomeDataSet";
+        writer.int32().writeArray(datasetName, new int[0]);
+        final String booleanAttributeName = "Boolean Attribute";
+        final boolean booleanAttributeValueWritten = true;
+        writer.bool().setAttr(datasetName, booleanAttributeName, booleanAttributeValueWritten);
+        assertTrue(writer.object().hasAttribute(datasetName, booleanAttributeName));
+        final String integerAttributeName = "Integer Attribute";
+        final int integerAttributeValueWritten = 17;
+        writer.int32().setAttr(datasetName, integerAttributeName, integerAttributeValueWritten);
+        final String byteAttributeName = "Byte Attribute";
+        final byte byteAttributeValueWritten = 17;
+        writer.int8().setAttr(datasetName, byteAttributeName, byteAttributeValueWritten);
+        final String unsignedByteAttributeName = "Unsigned Byte Attribute";
+        final short unsignedByteAttributeValueWritten = 128;
+        writer.uint8().setAttr(datasetName, unsignedByteAttributeName,
+                (byte) unsignedByteAttributeValueWritten);
+        final String stringAttributeName = "String Attribute";
+        final String stringAttributeValueWritten = "Some String Value";
+        writer.string().setAttr(datasetName, stringAttributeName, stringAttributeValueWritten);
+        final String stringAttributeNameVL = "String Attribute VL";
+        final String stringAttributeValueVLWritten1 = "Some String Value";
+        writer.string().setAttrVL(datasetName, stringAttributeNameVL,
+                stringAttributeValueVLWritten1);
+        final String stringAttributeValueVLWritten2 = "Some Other String Value";
+        writer.string().setAttrVL(datasetName, stringAttributeNameVL,
+                stringAttributeValueVLWritten2);
+        final String integerArrayAttributeName = "Integer Array Attribute";
+        final int[] integerArrayAttributeValueWritten = new int[]
+            { 17, 23, 42 };
+        writer.int32().setArrayAttr(datasetName, integerArrayAttributeName,
+                integerArrayAttributeValueWritten);
+        final String stringArrayAttributeName = "String Array Attribute";
+        final String[] stringArrayAttributeValueWritten = new String[]
+            { "Some String Value I", "Some String Value II", "Some String Value III" };
+        writer.string().setArrayAttr(datasetName, stringArrayAttributeName,
+                stringArrayAttributeValueWritten);
+        final String string2DArrayAttributeName = "String 2D Array Attribute";
+        final MDArray<String> string2DArrayAttributeValueWritten =
+                new MDArray<String>(
+                        new String[]
+                            { "Some String Value I", "Some String Value II",
+                                    "Some String Value III", "IV" }, new int[]
+                            { 2, 2 });
+        writer.string().setMDArrayAttr(datasetName, string2DArrayAttributeName,
+                string2DArrayAttributeValueWritten);
+        final HDF5EnumerationType enumType = writer.enumeration().getType("MyEnum", new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        final String enumAttributeName = "Enum Attribute";
+        final HDF5EnumerationValue enumAttributeValueWritten =
+                new HDF5EnumerationValue(enumType, "TWO");
+        writer.enumeration().setAttr(datasetName, enumAttributeName, enumAttributeValueWritten);
+        assertEquals(enumAttributeValueWritten.getType(),
+                writer.enumeration().getAttributeType(datasetName, enumAttributeName));
+        final String enumArrayAttributeName = "Enum Array Attribute";
+        final HDF5EnumerationValueArray enumArrayAttributeValueWritten =
+                new HDF5EnumerationValueArray(enumType, new String[]
+                    { "TWO", "THREE", "ONE" });
+        writer.enumeration().setArrayAttr(datasetName, enumArrayAttributeName,
+                enumArrayAttributeValueWritten);
+        final String enumMDArrayAttributeName = "Enum Array MD Attribute";
+        final HDF5EnumerationValueMDArray enumMDArrayAttributeValueWritten =
+                new HDF5EnumerationValueMDArray(enumType, new MDArray<String>(new String[]
+                    { "TWO", "THREE", "ONE", "ONE" }, new int[]
+                    { 2, 2 }));
+        writer.enumeration().setMDArrayAttr(datasetName, enumMDArrayAttributeName,
+                enumMDArrayAttributeValueWritten);
+        final String volatileAttributeName = "Some Volatile Attribute";
+        writer.int32().setAttr(datasetName, volatileAttributeName, 21);
+        writer.object().deleteAttribute(datasetName, volatileAttributeName);
+        final String floatArrayAttributeName = "Float Array Attribute";
+        final float[] floatArrayAttribute = new float[]
+            { 3f, 3.1f, 3.14f, 3.142f, 3.1416f };
+        writer.float32().setArrayAttr(datasetName, floatArrayAttributeName, floatArrayAttribute);
+        final String floatArrayMDAttributeName = "Float Array Multi-dimensional Attribute";
+        final MDFloatArray floatMatrixAttribute = new MDFloatArray(new float[][]
+            {
+                { 1, 2, 3 },
+                { 4, 5, 6 } });
+        writer.float32().setMDArrayAttr(datasetName, floatArrayMDAttributeName,
+                floatMatrixAttribute);
+        final MDFloatArray floatMatrixAttribute2 = new MDFloatArray(new float[][]
+            {
+                { 2, 3, 4 },
+                { 7, 8, 9 } });
+        writer.float32().setMatrixAttr(datasetName, floatArrayMDAttributeName,
+                floatMatrixAttribute2.toMatrix());
+        final String byteArrayAttributeName = "Byte Array Attribute";
+        final byte[] byteArrayAttribute = new byte[]
+            { 1, 2, 3 };
+        writer.int8().setArrayAttr(datasetName, byteArrayAttributeName, byteArrayAttribute);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(attributeFile);
+        assertTrue(reader.object().hasAttribute(datasetName, booleanAttributeName));
+        final boolean booleanAttributeValueRead =
+                reader.bool().getAttr(datasetName, booleanAttributeName);
+        assertEquals(booleanAttributeValueWritten, booleanAttributeValueRead);
+        final int integerAttributeValueRead =
+                reader.int32().getAttr(datasetName, integerAttributeName);
+        assertEquals(integerAttributeValueWritten, integerAttributeValueRead);
+        final byte byteAttributeValueRead = reader.int8().getAttr(datasetName, byteAttributeName);
+        assertEquals(byteAttributeValueWritten, byteAttributeValueRead);
+        final short unsignedByteAttributeValueRead =
+                reader.int16().getAttr(datasetName, unsignedByteAttributeName);
+        assertEquals(unsignedByteAttributeValueWritten, unsignedByteAttributeValueRead);
+        HDF5DataTypeInformation info =
+                reader.object().getAttributeInformation(datasetName, integerAttributeName);
+        assertEquals(HDF5DataClass.INTEGER, info.getDataClass());
+        assertEquals(4, info.getElementSize());
+        final String stringAttributeValueRead =
+                reader.string().getAttr(datasetName, stringAttributeName);
+        assertEquals(stringAttributeValueWritten, stringAttributeValueRead);
+        final int[] intArrayAttributeValueRead =
+                reader.int32().getArrayAttr(datasetName, integerArrayAttributeName);
+        assertTrue(Arrays.equals(integerArrayAttributeValueWritten, intArrayAttributeValueRead));
+        final String[] stringArrayAttributeValueRead =
+                reader.string().getArrayAttr(datasetName, stringArrayAttributeName);
+        assertTrue(Arrays.equals(stringArrayAttributeValueWritten, stringArrayAttributeValueRead));
+        info = reader.object().getAttributeInformation(datasetName, stringArrayAttributeName);
+        assertTrue(info.isArrayType());
+        assertEquals(HDF5DataClass.STRING, info.getDataClass());
+        assertEquals(21, info.getElementSize()); // longest string in the string array
+        assertEquals(3, info.getNumberOfElements());
+        assertEquals(1, info.getDimensions().length);
+        final MDArray<String> string2DArrayAttributeValueRead =
+                reader.string().getMDArrayAttr(datasetName, string2DArrayAttributeName);
+        assertEquals(string2DArrayAttributeValueWritten, string2DArrayAttributeValueRead);
+        final String stringAttributeValueVLRead =
+                reader.string().getAttr(datasetName, stringAttributeNameVL);
+        assertEquals(stringAttributeValueVLWritten2, stringAttributeValueVLRead);
+        final HDF5EnumerationValue enumAttributeValueRead =
+                reader.enumeration().getAttr(datasetName, enumAttributeName);
+        final String enumAttributeStringValueRead =
+                reader.enumeration().getAttrAsString(datasetName, enumAttributeName);
+        assertEquals(enumAttributeValueWritten.getValue(), enumAttributeValueRead.getValue());
+        assertEquals(enumAttributeValueWritten.getValue(), enumAttributeStringValueRead);
+        final HDF5EnumerationType enumAttributeType =
+                reader.enumeration().getAttributeType(datasetName, enumAttributeName);
+        assertEquals(enumAttributeValueWritten.getType(), enumAttributeType);
+        assertEquals("MyEnum", enumAttributeType.getName());
+        final String[] enumArrayAttributeReadAsString =
+                reader.enumeration().getArrayAttr(datasetName, enumArrayAttributeName)
+                        .toStringArray();
+        assertEquals(enumArrayAttributeValueWritten.getLength(),
+                enumArrayAttributeReadAsString.length);
+        for (int i = 0; i < enumArrayAttributeReadAsString.length; ++i)
+        {
+            assertEquals(enumArrayAttributeValueWritten.getValue(i),
+                    enumArrayAttributeReadAsString[i]);
+        }
+        final HDF5EnumerationValueArray enumArrayAttributeRead =
+                reader.enumeration().getArrayAttr(datasetName, enumArrayAttributeName);
+        final HDF5EnumerationType enumAttributeArrayType =
+                reader.enumeration().getAttributeType(datasetName, enumArrayAttributeName);
+        assertEquals(enumArrayAttributeRead.getType(), enumAttributeArrayType);
+        assertEquals("MyEnum", enumAttributeArrayType.getName());
+        assertEquals(enumArrayAttributeValueWritten.getLength(), enumArrayAttributeRead.getLength());
+        for (int i = 0; i < enumArrayAttributeRead.getLength(); ++i)
+        {
+            assertEquals(enumArrayAttributeValueWritten.getValue(i),
+                    enumArrayAttributeRead.getValue(i));
+        }
+        // Read the first element of the array attribute as a scalar via getAttrAsString()
+        assertEquals(enumArrayAttributeValueWritten.getValue(0), reader.enumeration()
+                .getAttrAsString(datasetName, enumArrayAttributeName));
+        // Read the first element of the array attribute as a scalar via getAttr()
+        assertEquals(enumArrayAttributeValueWritten.getValue(0),
+                reader.enumeration().getAttr(datasetName, enumArrayAttributeName).getValue());
+        assertFalse(reader.object().hasAttribute(datasetName, volatileAttributeName));
+        final HDF5EnumerationValueMDArray enumMDArrayAttributeRead =
+                reader.enumeration().getMDArrayAttr(datasetName, enumMDArrayAttributeName);
+        assertEquals(enumMDArrayAttributeValueWritten.toStringArray(),
+                enumMDArrayAttributeRead.toStringArray());
+        assertTrue(Arrays.equals(floatArrayAttribute,
+                reader.float32().getArrayAttr(datasetName, floatArrayAttributeName)));
+        assertTrue(floatMatrixAttribute2.equals(reader.float32().getMDArrayAttr(datasetName,
+                floatArrayMDAttributeName)));
+        assertTrue(floatMatrixAttribute2.equals(new MDFloatArray(reader.float32().getMatrixAttr(
+                datasetName, floatArrayMDAttributeName))));
+        assertTrue(Arrays.equals(byteArrayAttribute,
+                reader.int8().getArrayAttr(datasetName, byteArrayAttributeName)));
+        reader.close();
+    }
+
+    @Test
+    public void testSimpleDataspaceAttributes()
+    {
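+        // This test configures the writer to store attributes with a simple (rank-1)
+        // dataspace instead of the default scalar dataspace.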
+        final File attributeFile = new File(workingDirectory, "simpleDataspaceAttributes.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(attributeFile)
+                        .useSimpleDataSpaceForAttributes().writer();
+        final String datasetName = "SomeDataSet";
+        final String floatAttrName = "SomeFloatAttr";
+        final String floatAttrArrayName = "SomeFloatArrayAttr";
+        final String floatAttrMDArrayName = "SomeFloatMDArrayAttr";
+        final String unsignedIntAttrName = "SomeUnsignedIntAttr";
+        final String unsignedIntAttrArrayName = "SomeUnsignedIntArrayAttr";
+        final String unsignedIntAttrMDArrayName = "SomeUnsignedIntMDArrayAttr";
+        final String referenceAttrArrayName = "SomeRefAttr";
+        final String dateTimeAttrName = "SomeDateTimeAttr";
+        final String dateTimeAttrArrayName = "SomeDateTimeArrayAttr";
+        final String timeDurationAttrName = "SomeTimeDurationAttr";
+        final String timeDurationAttrArrayName = "SomeTimeDurationArrayAttr";
+        writer.float32().writeArray(datasetName, new float[0]);
+        writer.float32().setAttr(datasetName, floatAttrName, 17.0f);
+        final float[] floatArrayValueWritten = new float[]
+            { 1, 2, 3, };
+        writer.float32().setArrayAttr(datasetName, floatAttrArrayName, floatArrayValueWritten);
+        final MDFloatArray floatMDArrayWritten = new MDFloatArray(new float[]
+            { 1, 2, 3, 4 }, new int[]
+            { 2, 2 });
+        writer.float32().setMDArrayAttr(datasetName, floatAttrMDArrayName, floatMDArrayWritten);
+        writer.uint32().setAttr(datasetName, unsignedIntAttrName, toInt32(4000000000L));
+        final int[] uintArrayValueWritten = new int[]
+            { toInt32(4000000001L), toInt32(4000000002L), toInt32(4000000003L) };
+        writer.uint32().setArrayAttr(datasetName, unsignedIntAttrArrayName, uintArrayValueWritten);
+        final MDIntArray uintMDArrayValueWritten =
+                new MDIntArray(new int[]
+                    { toInt32(4000000000L), toInt32(4000000002L), toInt32(4000000003L),
+                            toInt32(4000000003L) }, new int[]
+                    { 2, 2 });
+        writer.uint32().setMDArrayAttr(datasetName, unsignedIntAttrMDArrayName,
+                uintMDArrayValueWritten);
+        writer.reference().setArrayAttr(datasetName, referenceAttrArrayName, new String[]
+            { datasetName, datasetName });
+        writer.time().setAttr(datasetName, dateTimeAttrName, 1000L);
+        writer.time().setArrayAttr(datasetName, dateTimeAttrArrayName, new long[]
+            { 1000L, 2000L });
+        writer.duration().setAttr(datasetName, timeDurationAttrName,
+                new HDF5TimeDuration(100L, HDF5TimeUnit.SECONDS));
+        writer.duration().setArrayAttr(datasetName, timeDurationAttrArrayName,
+                new HDF5TimeDurationArray(new long[]
+                    { 100L, 150L }, HDF5TimeUnit.SECONDS));
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(attributeFile);
+        assertTrue(reader.object().hasAttribute(datasetName, floatAttrName));
+        final float attributeValue = reader.float32().getAttr(datasetName, floatAttrName);
+        assertEquals(17.0f, attributeValue);
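+        // Drop down to the low-level HDF5 API to verify the on-disk data type class
+        // of each attribute that was written.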
+        final HDF5BaseReader baseReader = ((HDF5FloatReader) reader.float32()).getBaseReader();
+        final int objectId =
+                baseReader.h5.openObject(baseReader.fileId, datasetName, baseReader.fileRegistry);
+        int attributeId =
+                baseReader.h5.openAttribute(objectId, floatAttrName, baseReader.fileRegistry);
+        int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_FLOAT, baseReader.h5.getClassType(attributeTypeId));
+        assertTrue(reader.object().hasAttribute(datasetName, floatAttrArrayName));
+        final float[] attributeArrayValueRead =
+                reader.float32().getArrayAttr(datasetName, floatAttrArrayName);
+        assertTrue(Arrays.equals(floatArrayValueWritten, attributeArrayValueRead));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, floatAttrArrayName, baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_FLOAT, baseReader.h5.getClassType(attributeTypeId));
+        assertTrue(reader.object().hasAttribute(datasetName, floatAttrMDArrayName));
+        final MDFloatArray attributeMDArrayValueRead =
+                reader.float32().getMDArrayAttr(datasetName, floatAttrMDArrayName);
+        assertEquals(floatMDArrayWritten, attributeMDArrayValueRead);
+        assertEquals(toInt32(4000000000L), reader.uint32()
+                .getAttr(datasetName, unsignedIntAttrName));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, unsignedIntAttrName, baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_INTEGER, baseReader.h5.getClassType(attributeTypeId));
+        assertTrue(Arrays.equals(uintArrayValueWritten,
+                reader.uint32().getArrayAttr(datasetName, unsignedIntAttrArrayName)));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, unsignedIntAttrArrayName,
+                        baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_INTEGER, baseReader.h5.getClassType(attributeTypeId));
+        assertEquals(uintMDArrayValueWritten,
+                reader.uint32().getMDArrayAttr(datasetName, unsignedIntAttrMDArrayName));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, unsignedIntAttrMDArrayName,
+                        baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_INTEGER, baseReader.h5.getClassType(attributeTypeId));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, referenceAttrArrayName,
+                        baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_REFERENCE, baseReader.h5.getClassType(attributeTypeId));
+        final String[] referenceValues =
+                reader.reference().getArrayAttr(datasetName, referenceAttrArrayName);
+        assertEquals(2, referenceValues.length);
+        assertEquals("/" + datasetName, referenceValues[0]);
+        assertEquals("/" + datasetName, referenceValues[1]);
+        assertEquals(1000L, reader.time().getAttrAsLong(datasetName, dateTimeAttrName));
+        assertTrue(Arrays.equals(new long[]
+            { 1000L, 2000L }, reader.time().getArrayAttrAsLong(datasetName, dateTimeAttrArrayName)));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, dateTimeAttrName, baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_INTEGER, baseReader.h5.getClassType(attributeTypeId));
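+        // The type variant of the time stamp is stored in a companion enum attribute
+        // named "__TYPE_VARIANT__<attribute name>__".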
+        attributeId =
+                baseReader.h5.openAttribute(objectId, "__TYPE_VARIANT__" + dateTimeAttrName + "__",
+                        baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_ENUM, baseReader.h5.getClassType(attributeTypeId));
+        assertEquals(new HDF5TimeDuration(100L, HDF5TimeUnit.SECONDS),
+                reader.duration().getAttr(datasetName, timeDurationAttrName));
+        assertEquals(new HDF5TimeDurationArray(new long[]
+            { 100L, 150L }, HDF5TimeUnit.SECONDS),
+                reader.duration().getArrayAttr(datasetName, timeDurationAttrArrayName));
+        attributeId =
+                baseReader.h5
+                        .openAttribute(objectId, timeDurationAttrName, baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_INTEGER, baseReader.h5.getClassType(attributeTypeId));
+        attributeId =
+                baseReader.h5.openAttribute(objectId, "__TYPE_VARIANT__" + timeDurationAttrName
+                        + "__", baseReader.fileRegistry);
+        attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, baseReader.fileRegistry);
+        assertEquals(H5T_ENUM, baseReader.h5.getClassType(attributeTypeId));
+        reader.close();
+    }
+
+    @Test
+    public void testTimeStampAttributes()
+    {
+        final File attributeFile = new File(workingDirectory, "timeStampAttributes.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(attributeFile);
+        final String datasetName = "SomeDataSet";
+        final String lastChangedAttr = "lastChanged";
+        final String someLongAttr = "someLong";
+        final Date now = new Date();
+        writer.int32().writeArray(datasetName, new int[0]);
+        writer.int64().setAttr(datasetName, someLongAttr, 115L);
+        writer.time().setAttr(datasetName, lastChangedAttr, now);
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(attributeFile);
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH, reader
+                .object().tryGetTypeVariant(datasetName, lastChangedAttr));
+        assertFalse(reader.time().isTimeStamp(datasetName));
+        assertTrue(reader.time().isTimeStamp(datasetName, lastChangedAttr));
+        assertFalse(reader.duration().isTimeDuration(datasetName, lastChangedAttr));
+        assertEquals(now, reader.time().getAttr(datasetName, lastChangedAttr));
+        assertFalse(reader.time().isTimeStamp(datasetName, someLongAttr));
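+        // Reading a plain int64 attribute through the time() interface must throw.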
+        try
+        {
+            reader.time().getAttrAsLong(datasetName, someLongAttr);
+            fail("Did not detect non-time-stamp attribute.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Attribute 'someLong' of data set 'SomeDataSet' is not a time stamp.",
+                    ex.getMessage());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testTimeDurationAttributes()
+    {
+        final File attributeFile = new File(workingDirectory, "timeDurationAttributes.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(attributeFile);
+        final String datasetName = "SomeDataSet";
+        final String validUntilAttr = "validUtil";
+        final String someLongAttr = "someLong";
+        writer.int32().writeArray(datasetName, new int[0]);
+        writer.duration().setAttr(datasetName, validUntilAttr, 10, HDF5TimeUnit.MINUTES);
+        writer.int64().setAttr(datasetName, someLongAttr, 115L);
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(attributeFile);
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_MINUTES,
+                reader.object().tryGetTypeVariant(datasetName, validUntilAttr));
+        assertFalse(reader.time().isTimeStamp(datasetName));
+        assertFalse(reader.time().isTimeStamp(datasetName, validUntilAttr));
+        assertTrue(reader.duration().isTimeDuration(datasetName, validUntilAttr));
+        assertEquals(HDF5TimeUnit.MINUTES,
+                reader.duration().tryGetTimeUnit(datasetName, validUntilAttr));
+        assertEquals(new HDF5TimeDuration(10, HDF5TimeUnit.MINUTES),
+                reader.duration().getAttr(datasetName, validUntilAttr));
+        assertEquals(
+                10 * 60,
+                reader.duration().getAttr(datasetName, validUntilAttr)
+                        .getValue(HDF5TimeUnit.SECONDS));
+        assertFalse(reader.duration().isTimeDuration(datasetName, someLongAttr));
+        try
+        {
+            reader.duration().getAttr(datasetName, someLongAttr);
+            fail("Did not detect non-time-duration attribute.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Attribute 'someLong' of data set 'SomeDataSet' is not a time duration.",
+                    ex.getMessage());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testTimeStampArrayAttributes()
+    {
+        final File attributeFile = new File(workingDirectory, "timeStampArrayAttributes.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(attributeFile);
+        final String datasetName = "SomeDataSet";
+        final String lastChangedAttr = "lastChanged";
+        final String someDates = "someDates";
+        final String someLongAttr = "someLong";
+        final Date now = new Date();
+        writer.int32().writeArray(datasetName, new int[0]);
+        writer.int64().setArrayAttr(datasetName, someLongAttr, new long[]
+            { 115L });
+        writer.time().setArrayAttr(datasetName, lastChangedAttr, new Date[]
+            { now });
+        writer.time().setMDArrayAttr(
+                datasetName,
+                someDates,
+                new MDArray<Date>(new Date[]
+                    { now, new Date(now.getTime() - 1000L), new Date(now.getTime() - 2000L),
+                            new Date(now.getTime() - 3000L) }, new int[]
+                    { 2, 2 }));
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(attributeFile);
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH, reader
+                .object().tryGetTypeVariant(datasetName, lastChangedAttr));
+        assertFalse(reader.time().isTimeStamp(datasetName));
+        assertTrue(reader.time().isTimeStamp(datasetName, lastChangedAttr));
+        assertFalse(reader.duration().isTimeDuration(datasetName, lastChangedAttr));
+        assertEquals(1, reader.time().getArrayAttr(datasetName, lastChangedAttr).length);
+        assertEquals(now, reader.time().getArrayAttr(datasetName, lastChangedAttr)[0]);
+        assertFalse(reader.time().isTimeStamp(datasetName, someLongAttr));
+        assertTrue(reader.time().isTimeStamp(datasetName, someDates));
+        assertEquals(now.getTime(),
+                reader.time().getMDArrayAttrAsLong(datasetName, someDates).get(0, 0));
+        assertEquals(now.getTime() - 1000L,
+                reader.time().getMDArrayAttrAsLong(datasetName, someDates).get(0, 1));
+        assertEquals(now.getTime() - 2000L,
+                reader.time().getMDArrayAttrAsLong(datasetName, someDates).get(1, 0));
+        assertEquals(now.getTime() - 3000L,
+                reader.time().getMDArrayAttrAsLong(datasetName, someDates).get(1, 1));
+        try
+        {
+            reader.time().getArrayAttrAsLong(datasetName, someLongAttr);
+            fail("Did not detect non-time-stamp attribute.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Attribute 'someLong' of data set 'SomeDataSet' is not a time stamp.",
+                    ex.getMessage());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testTimeDurationArrayAttributes()
+    {
+        final File attributeFile = new File(workingDirectory, "timeDurationArrayAttributes.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(attributeFile);
+        final String datasetName = "SomeDataSet";
+        final String validUntilAttr = "validUtil";
+        final String someDurations = "someDurations";
+        final String someLongAttr = "someLong";
+        writer.int32().writeArray(datasetName, new int[0]);
+        writer.duration().setArrayAttr(datasetName, validUntilAttr,
+                HDF5TimeDurationArray.create(HDF5TimeUnit.MINUTES, 10));
+        final HDF5TimeDurationMDArray someDurationValues = new HDF5TimeDurationMDArray(new long[]
+            { 1, 2, 3, 4 }, new int[]
+            { 2, 2 }, HDF5TimeUnit.MINUTES);
+        writer.duration().setMDArrayAttr(datasetName, someDurations, someDurationValues);
+        writer.int64().setArrayAttr(datasetName, someLongAttr, new long[]
+            { 115L });
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(attributeFile);
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_MINUTES,
+                reader.object().tryGetTypeVariant(datasetName, validUntilAttr));
+        assertFalse(reader.time().isTimeStamp(datasetName));
+        assertFalse(reader.time().isTimeStamp(datasetName, validUntilAttr));
+        assertTrue(reader.duration().isTimeDuration(datasetName, validUntilAttr));
+        assertEquals(HDF5TimeUnit.MINUTES,
+                reader.duration().tryGetTimeUnit(datasetName, validUntilAttr));
+        assertEquals(1, reader.duration().getArrayAttr(datasetName, validUntilAttr).getLength());
+        assertEquals(new HDF5TimeDuration(10, HDF5TimeUnit.MINUTES), reader.duration()
+                .getArrayAttr(datasetName, validUntilAttr).get(0));
+        assertEquals(
+                10 * 60,
+                reader.duration().getArrayAttr(datasetName, validUntilAttr)
+                        .getValue(0, HDF5TimeUnit.SECONDS));
+        assertFalse(reader.duration().isTimeDuration(datasetName, someLongAttr));
+        assertTrue(reader.duration().isTimeDuration(datasetName, someDurations));
+        assertEquals(someDurationValues,
+                reader.duration().getMDArrayAttr(datasetName, someDurations));
+        try
+        {
+            reader.duration().getArrayAttr(datasetName, someLongAttr);
+            fail("Did not detect non-time-duration attribute.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Attribute 'someLong' of data set 'SomeDataSet' is not a time duration.",
+                    ex.getMessage());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testAttributeDimensionArray()
+    {
+        final File attributeFile = new File(workingDirectory, "attributeDimensionalArray.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(attributeFile);
+        final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
+        final String datasetName = "SomeDataSet";
+        final String attributeName = "farray";
+        final float[] farray = new float[]
+            { 0, 10, 100 };
+
+        writer.int32().writeArray(datasetName, new int[0]);
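+        // setFloatArrayAttributeDimensional (a helper of this test) writes the attribute
+        // using a rank-1 dataspace rather than an array data type, so the attribute
+        // information below does not report an array type.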
+        efWriter.setFloatArrayAttributeDimensional(datasetName, attributeName, farray);
+        final HDF5DataTypeInformation info =
+                writer.object().getAttributeInformation(datasetName, attributeName);
+        assertEquals("FLOAT(4, #3)", info.toString());
+        assertFalse(info.isArrayType());
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(attributeFile);
+        assertTrue(Arrays.equals(farray, reader.float32().getArrayAttr(datasetName, attributeName)));
+        reader.close();
+    }
+
+    @Test
+    public void testAttributeDimensionArrayOverwrite()
+    {
+        final File attributeFile =
+                new File(workingDirectory, "attributeDimensionalArrayOverwrite.h5");
+        attributeFile.delete();
+        assertFalse(attributeFile.exists());
+        attributeFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(attributeFile);
+        final HDF5ArrayTypeFloatWriter efWriter = new HDF5ArrayTypeFloatWriter((HDF5Writer) writer);
+        final String datasetName = "SomeDataSet";
+        final String attributeName = "farray";
+        final float[] farray = new float[]
+            { 0, 10, 100 };
+
+        writer.int32().writeArray(datasetName, new int[0]);
+        efWriter.setFloatArrayAttributeDimensional(datasetName, attributeName, farray);
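+        // Overwriting with the regular setArrayAttr replaces the dimensional attribute
+        // with an array-type attribute, as the assertion below verifies.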
+        writer.float32().setArrayAttr(datasetName, attributeName, farray);
+        final HDF5DataTypeInformation info =
+                writer.object().getAttributeInformation(datasetName, attributeName);
+        assertEquals("FLOAT(4, #3)", info.toString());
+        assertTrue(info.isArrayType());
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(attributeFile);
+        assertTrue(Arrays.equals(farray, reader.float32().getArrayAttr(datasetName, attributeName)));
+        reader.close();
+    }
+
+    @Test
+    public void testCreateDataTypes()
+    {
+        final File file = new File(workingDirectory, "types.h5");
+        final String enumName = "TestEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        try
+        {
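+            // Committed data types appear as members of the internal data type group.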
+            final List<String> initialDataTypes =
+                    writer.getGroupMembers(HDF5Utils.getDataTypeGroup(""));
+
+            writer.enumeration().getType(enumName, new String[]
+                { "ONE", "TWO", "THREE" }, false);
+            final Set<String> dataTypes =
+                    new HashSet<String>(writer.getGroupMembers(HDF5Utils.getDataTypeGroup("")));
+            assertEquals(initialDataTypes.size() + 1, dataTypes.size());
+            assertTrue(dataTypes.contains(HDF5Utils.ENUM_PREFIX + enumName));
+        } finally
+        {
+            writer.close();
+        }
+    }
+
+    @Test
+    public void testGroups()
+    {
+        final File groupFile = new File(workingDirectory, "groups.h5");
+        groupFile.delete();
+        assertFalse(groupFile.exists());
+        groupFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(groupFile);
+        final String groupName1 = "/group";
+        final String groupName2 = "/group2";
+        final String groupName4 = "/dataSetGroup";
+        final String groupName5 = "/group5";
+        final String dataSetName = groupName4 + "/dataset";
+        writer.object().createGroup(groupName1);
+        writer.object().createGroup(groupName2);
+        writer.int8().writeArray(dataSetName, new byte[]
+            { 1 });
+        assertTrue(writer.isGroup(groupName1));
+        assertTrue(writer.isGroup(groupName2));
+        assertTrue(writer.isGroup(groupName4));
+        assertFalse(writer.isGroup(dataSetName));
+        assertFalse(writer.isGroup(groupName5));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(groupFile);
+        assertTrue(reader.isGroup(groupName1));
+        assertEquals(HDF5ObjectType.GROUP, reader.object().getObjectType(groupName1));
+        assertTrue(reader.isGroup(groupName4));
+        assertEquals(HDF5ObjectType.GROUP, reader.object().getObjectType(groupName4));
+        assertFalse(reader.isGroup(dataSetName));
+        reader.close();
+    }
+
+    @Test
+    public void testDefaultHousekeepingFile()
+    {
+        final File file = new File(workingDirectory, "defaultHousekeepingFile.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        assertEquals("", writer.file().getHouseKeepingNameSuffix());
+        assertEquals("__abc__", writer.object().toHouseKeepingPath("abc"));
+        writer.string().write(writer.object().toHouseKeepingPath("abc"), "ABC");
+        assertTrue(writer.exists("__abc__"));
+        assertTrue(writer.object().getGroupMemberPaths("/").isEmpty());
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertTrue(reader.object().getAttributeNames("/").isEmpty());
+        assertEquals("", reader.file().getHouseKeepingNameSuffix());
+        assertEquals("__abc__", reader.object().toHouseKeepingPath("abc"));
+        assertTrue(reader.exists("__abc__"));
+        assertEquals("ABC", reader.readString("__abc__"));
+        assertTrue(reader.object().getGroupMemberPaths("/").isEmpty());
+        reader.close();
+    }
+
+    @Test
+    public void testNonDefaultHousekeepingFile()
+    {
+        final File file = new File(workingDirectory, "nonDefaultHousekeepingFile.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5Factory.configure(file).houseKeepingNameSuffix("XXX").writer();
+        assertEquals("XXX", writer.file().getHouseKeepingNameSuffix());
+        assertEquals("abcXXX", writer.object().toHouseKeepingPath("abc"));
+        writer.string().write(writer.object().toHouseKeepingPath("abc"), "ABC");
+        assertTrue(writer.exists("abcXXX"));
+        assertFalse(writer.exists("__abc__"));
+        assertTrue(writer.object().getGroupMemberPaths("/").isEmpty());
+        writer.close();
+
+        // The housekeeping name suffix is only taken into account when creating a new
+        // file; if the file already exists, the suffix stored in the file takes precedence.
+        final IHDF5Writer writer2 =
+                HDF5Factory.configure(file).houseKeepingNameSuffix("YYY").writer();
+        assertEquals("XXX", writer2.file().getHouseKeepingNameSuffix());
+        assertEquals("abcXXX", writer2.object().toHouseKeepingPath("abc"));
+        assertTrue(writer2.exists("abcXXX"));
+        writer2.string().write(writer2.object().toHouseKeepingPath("abc"), "CAB");
+        assertFalse(writer2.exists("__abc__"));
+        assertFalse(writer2.exists("abcYYY"));
+        assertEquals("CAB", writer2.readString("abcXXX"));
+        assertTrue(writer2.object().getGroupMemberPaths("/").isEmpty());
+        writer2.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        assertTrue(reader.object().getAttributeNames("/").isEmpty());
+        assertEquals("XXX", reader.file().getHouseKeepingNameSuffix());
+        assertEquals("abcXXX", reader.object().toHouseKeepingPath("abc"));
+        assertTrue(reader.exists("abcXXX"));
+        assertFalse(reader.exists("__abc__"));
+        assertEquals("CAB", reader.readString("abcXXX"));
+        assertTrue(reader.object().getGroupMemberPaths("/").isEmpty());
+        reader.close();
+    }
+
+    @Test
+    public void testHousekeepingFileSuffixNonPrintable()
+    {
+        final File file = new File(workingDirectory, "housekeepingFileSuffixNonPrintable.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5Factory.configure(file).houseKeepingNameSuffix("\1\0").writer();
+        assertEquals("\1\0", writer.file().getHouseKeepingNameSuffix());
+        assertEquals("abc\1\0", writer.object().toHouseKeepingPath("abc"));
+        writer.string().write(writer.object().toHouseKeepingPath("abc"), "ABC");
+        assertTrue(writer.exists("abc\1\0"));
+        assertFalse(writer.exists("__abc__"));
+        assertTrue(writer.object().getGroupMemberPaths("/").isEmpty());
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        assertEquals("\1\0", reader.file().getHouseKeepingNameSuffix());
+        assertEquals("abc\1\0", reader.object().toHouseKeepingPath("abc"));
+        assertTrue(reader.exists("abc\1\0"));
+        assertFalse(reader.exists("__abc__"));
+        assertEquals("ABC", reader.readString("abc\1\0"));
+        assertTrue(reader.object().getGroupMemberPaths("/").isEmpty());
+        reader.close();
+    }
+
+    @Test
+    public void testGetObjectType()
+    {
+        final File file = new File(workingDirectory, "typeInfo.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.writeBoolean("/some/flag", false);
+        writer.object().createSoftLink("/some", "/linkToSome");
+        writer.object().createSoftLink("/some/flag", "/linkToFlag");
+        writer.object().createHardLink("/some/flag", "/some/flag2");
+        writer.bool().setAttr("/some/flag2", "test", true);
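+        // With the boolean argument set to false, the link itself is inspected;
+        // without it, soft links are resolved to the objects they point to.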
+        assertEquals(HDF5ObjectType.GROUP, writer.object().getObjectType("/some"));
+        assertEquals(HDF5ObjectType.SOFT_LINK, writer.object().getObjectType("/linkToSome", false));
+        assertEquals(HDF5ObjectType.GROUP, writer.object().getObjectType("/some"));
+        assertEquals(HDF5ObjectType.GROUP, writer.object().getObjectType("/linkToSome"));
+        assertEquals(HDF5ObjectType.DATASET, writer.object().getObjectType("/some/flag", false));
+        assertEquals(HDF5ObjectType.DATASET, writer.object().getObjectType("/some/flag"));
+        assertEquals(HDF5ObjectType.SOFT_LINK, writer.object().getObjectType("/linkToFlag", false));
+        assertEquals(HDF5ObjectType.DATASET, writer.object().getObjectType("/linkToFlag"));
+        assertFalse(writer.exists("non_existent"));
+        assertEquals(HDF5ObjectType.NONEXISTENT, writer.object().getObjectType("non_existent"));
+        writer.close();
+    }
+
+    @Test(expectedExceptions = HDF5JavaException.class)
+    public void testGetLinkInformationFailed()
+    {
+        final File file = new File(workingDirectory, "linkInfo.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        try
+        {
+            assertFalse(writer.exists("non_existent"));
+            writer.object().getLinkInformation("non_existent").checkExists();
+        } finally
+        {
+            writer.close();
+        }
+    }
+
+    @Test
+    public void testGetDataSetInformation()
+    {
+        final File file = new File(workingDirectory, "dsInfo.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.int32().write("dsScalar", 12);
+        writer.int16().writeMatrix("ds", new short[][]
+            {
+                { (short) 1, (short) 2, (short) 3 },
+                { (short) 4, (short) 5, (short) 6 } });
+        final String s = "this is a string";
+        writer.string().write("stringDS", s);
+        writer.string().writeVL("stringDSVL", s);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5DataSetInformation scalarInfo = reader.getDataSetInformation("dsScalar");
+        assertEquals(HDF5DataClass.INTEGER, scalarInfo.getTypeInformation().getDataClass());
+        assertEquals(4, scalarInfo.getTypeInformation().getElementSize());
+        assertEquals(0, scalarInfo.getRank());
+        assertTrue(scalarInfo.isScalar());
+        assertEquals(0, scalarInfo.getDimensions().length);
+        assertNull(scalarInfo.tryGetChunkSizes());
+        final HDF5DataSetInformation info = reader.getDataSetInformation("ds");
+        assertEquals(HDF5DataClass.INTEGER, info.getTypeInformation().getDataClass());
+        assertEquals(2, info.getTypeInformation().getElementSize());
+        assertEquals(2, info.getRank());
+        assertFalse(info.isScalar());
+        assertEquals(2, info.getDimensions()[0]);
+        assertEquals(3, info.getDimensions()[1]);
+        assertChunkSizes(info, 2, 3);
+        final HDF5DataSetInformation stringInfo = reader.getDataSetInformation("stringDS");
+        assertEquals(HDF5DataClass.STRING, stringInfo.getTypeInformation().getDataClass());
+        assertEquals(s.length(), stringInfo.getTypeInformation().getElementSize());
+        assertEquals(0, stringInfo.getDimensions().length);
+        assertEquals(0, stringInfo.getMaxDimensions().length);
+        assertEquals(HDF5StorageLayout.COMPACT, stringInfo.getStorageLayout());
+        assertNull(stringInfo.tryGetChunkSizes());
+        final HDF5DataSetInformation stringInfoVL = reader.getDataSetInformation("stringDSVL");
+        assertEquals(HDF5DataClass.STRING, stringInfoVL.getTypeInformation().getDataClass());
+        assertTrue(stringInfoVL.getTypeInformation().isVariableLengthString());
+        assertEquals(-1, stringInfoVL.getTypeInformation().getElementSize());
+        assertEquals(0, stringInfoVL.getDimensions().length);
+        assertEquals(HDF5StorageLayout.COMPACT, stringInfoVL.getStorageLayout());
+        assertNull(stringInfoVL.tryGetChunkSizes());
+        assertEquals(0, stringInfoVL.getMaxDimensions().length);
+        reader.close();
+    }
+
+    @Test(expectedExceptions = HDF5SymbolTableException.class)
+    public void testGetDataSetInformationFailed()
+    {
+        final File file = new File(workingDirectory, "dsInfo.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        try
+        {
+            assertFalse(writer.exists("non_existent"));
+            writer.getDataSetInformation("non_existent");
+        } finally
+        {
+            writer.close();
+        }
+    }
+
+    @Test
+    public void testGetGroupMemberInformation()
+    {
+        final File groupFile = new File(workingDirectory, "groupMemberInformation.h5");
+        groupFile.delete();
+        assertFalse(groupFile.exists());
+        groupFile.deleteOnExit();
+        final String groupName1 = "/group";
+        final String groupName2 = "/dataSetGroup";
+        final String dataSetName = groupName2 + "/dataset";
+        final String dataSetName2 = "ds2";
+        final String linkName = "/link";
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(groupFile);
+        try
+        {
+            writer.object().createGroup(groupName1);
+            writer.int8().writeArray(dataSetName, new byte[]
+                { 1 });
+            writer.string().write(dataSetName2, "abc");
+            writer.object().createSoftLink(dataSetName2, linkName);
+        } finally
+        {
+            writer.close();
+        }
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(groupFile);
+        final Map<String, HDF5LinkInformation> map = new HashMap<String, HDF5LinkInformation>();
+        for (HDF5LinkInformation info : reader.object().getAllGroupMemberInformation("/", false))
+        {
+            map.put(info.getPath(), info);
+        }
+        HDF5LinkInformation info;
+        assertEquals(5, map.size());
+        info = map.get(groupName1);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.GROUP, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+        info = map.get(groupName2);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.GROUP, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+        info = map.get("/" + dataSetName2);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.DATASET, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+        info = map.get(linkName);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.SOFT_LINK, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+
+        map.clear();
+        for (HDF5LinkInformation info2 : reader.object().getGroupMemberInformation("/", true))
+        {
+            map.put(info2.getPath(), info2);
+        }
+        assertEquals(4, map.size());
+        info = map.get(groupName1);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.GROUP, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+        info = map.get(groupName2);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.GROUP, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+        info = map.get("/" + dataSetName2);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.DATASET, info.getType());
+        assertNull(info.tryGetSymbolicLinkTarget());
+        info = map.get(linkName);
+        assertNotNull(info);
+        assertTrue(info.exists());
+        assertEquals(HDF5ObjectType.SOFT_LINK, info.getType());
+        assertEquals(dataSetName2, info.tryGetSymbolicLinkTarget());
+
+        reader.close();
+    }
+
+    @Test
+    public void testHardLink()
+    {
+        final File linkFile = new File(workingDirectory, "hardLink.h5");
+        linkFile.delete();
+        assertFalse(linkFile.exists());
+        linkFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(linkFile);
+        final String str = "BlaBlub";
+        writer.string().write("/data/set", str);
+        writer.object().createHardLink("/data/set", "/data/link");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(linkFile);
+        assertEquals(HDF5ObjectType.DATASET, reader.object().getObjectType("/data/link"));
+        assertEquals(str, reader.readString("/data/link"));
+        reader.close();
+    }
+
+    @Test
+    public void testSoftLink()
+    {
+        final File linkFile = new File(workingDirectory, "softLink.h5");
+        linkFile.delete();
+        assertFalse(linkFile.exists());
+        linkFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(linkFile);
+        writer.writeBoolean("/data/set", true);
+        writer.object().createSoftLink("/data/set", "/data/link");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(linkFile);
+        assertEquals(HDF5ObjectType.SOFT_LINK, reader.object().getObjectType("/data/link", false));
+        assertEquals("/data/set", reader.object().getLinkInformation("/data/link")
+                .tryGetSymbolicLinkTarget());
+        reader.close();
+    }
+
+    @Test
+    public void testUpdateSoftLink()
+    {
+        final File file = new File(workingDirectory, "updateSoftLink.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final long now = System.currentTimeMillis();
+        final String dataSetName1 = "creationTime1";
+        final String dataSetName2 = "creationTime2";
+        final String linkName = "time";
+        writer.time().write(dataSetName1, now);
+        writer.time().write(dataSetName2, now);
+        writer.object().createSoftLink(dataSetName1, linkName);
+        writer.object().createOrUpdateSoftLink(dataSetName2, linkName);
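+        // Updating a name that exists as a non-link object must fail with H5E_EXISTS.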
+        try
+        {
+            writer.object().createOrUpdateSoftLink(dataSetName1, dataSetName2);
+        } catch (HDF5LibraryException ex)
+        {
+            assertEquals(HDF5Constants.H5E_EXISTS, ex.getMinorErrorNumber());
+        }
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertEquals(dataSetName2, reader.object().getLinkInformation(linkName)
+                .tryGetSymbolicLinkTarget());
+        reader.close();
+    }
+
+    @Test
+    public void testBrokenSoftLink()
+    {
+        final File linkFile = new File(workingDirectory, "brokenSoftLink.h5");
+        linkFile.delete();
+        assertFalse(linkFile.exists());
+        linkFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(linkFile);
+        writer.object().createSoftLink("/does/not/exist", "/linkToNowhere");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(linkFile);
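+        // exists() resolves links by default, so a dangling soft link reports false;
+        // passing false inspects the link object itself.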
+        assertFalse(reader.exists("/linkToNowhere"));
+        assertTrue(reader.object().exists("/linkToNowhere", false));
+        assertEquals(HDF5ObjectType.SOFT_LINK,
+                reader.object().getObjectType("/linkToNowhere", false));
+        assertEquals("/does/not/exist", reader.object().getLinkInformation("/linkToNowhere")
+                .tryGetSymbolicLinkTarget());
+        reader.close();
+    }
+
+    @Test
+    public void testDeleteSoftLink()
+    {
+        final File linkFile = new File(workingDirectory, "deleteSoftLink.h5");
+        linkFile.delete();
+        assertFalse(linkFile.exists());
+        linkFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(linkFile);
+        writer.writeBoolean("/group/boolean", true);
+        writer.object().createSoftLink("/group", "/link");
+        writer.delete("/link");
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(linkFile);
+        assertFalse(reader.object().exists("/link", false));
+        assertTrue(reader.exists("/group"));
+        assertTrue(reader.exists("/group/boolean"));
+        reader.close();
+    }
+
+    @Test
+    public void testNullOnGetSymbolicLinkTargetForNoLink()
+    {
+        final File noLinkFile = new File(workingDirectory, "noLink.h5");
+        noLinkFile.delete();
+        assertFalse(noLinkFile.exists());
+        noLinkFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(noLinkFile);
+        writer.writeBoolean("/data/set", true);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(noLinkFile);
+        try
+        {
+            assertNull(reader.object().getLinkInformation("/data/set").tryGetSymbolicLinkTarget());
+        } finally
+        {
+            reader.close();
+        }
+    }
+
+    @Test
+    public void testExternalLink()
+    {
+        final File fileToLinkTo = new File(workingDirectory, "fileToLinkTo.h5");
+        fileToLinkTo.delete();
+        assertFalse(fileToLinkTo.exists());
+        fileToLinkTo.deleteOnExit();
+        final IHDF5Writer writer1 = HDF5FactoryProvider.get().open(fileToLinkTo);
+        final String dataSetName = "/data/set";
+        final String dataSetValue = "Some data set value...";
+        writer1.string().write(dataSetName, dataSetValue);
+        writer1.close();
+        final File linkFile = new File(workingDirectory, "externalLink.h5");
+        linkFile.delete();
+        assertFalse(linkFile.exists());
+        linkFile.deleteOnExit();
+        final IHDF5Writer writer2 = HDF5FactoryProvider.get().open(linkFile);
+        final String linkName = "/data/link";
+        writer2.object().createExternalLink(fileToLinkTo.getPath(), dataSetName, linkName);
+        writer2.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(linkFile);
+        assertEquals(HDF5ObjectType.EXTERNAL_LINK, reader.object().getObjectType(linkName, false));
+        assertEquals(dataSetValue, reader.readString(linkName));
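+        // External link targets are reported as "EXTERNAL::<file path>::<object path>".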
+        final String expectedLink =
+                OSUtilities.isWindows() ? "EXTERNAL::targets\\unit-test-wd\\hdf5-roundtrip-wd\\fileToLinkTo.h5::/data/set"
+                        : "EXTERNAL::targets/unit-test-wd/hdf5-roundtrip-wd/fileToLinkTo.h5::/data/set";
+        assertEquals(expectedLink, reader.object().getLinkInformation(linkName)
+                .tryGetSymbolicLinkTarget());
+        reader.close();
+    }
+
+    @Test
+    public void testDataTypeInfoOptions()
+    {
+        final File file = new File(workingDirectory, "dataTypeInfoOptions.h5");
+        final String enumDsName = "/testEnum";
+        final String dateDsName = "/testDate";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).writer();
+        writer.enumeration().write(enumDsName, JavaEnum.TWO);
+        writer.time().write(dateDsName, new Date(10000L));
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
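+        // DataTypeInfoOptions controls how much type metadata is resolved: MINIMAL
+        // resolves neither the data type path nor the type variant, DEFAULT resolves
+        // only the variant, and ALL resolves both.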
+        final HDF5DataTypeInformation minimalEnumInfo =
+                reader.object().getDataSetInformation(enumDsName, DataTypeInfoOptions.MINIMAL)
+                        .getTypeInformation();
+        assertFalse(minimalEnumInfo.knowsDataTypePath());
+        assertFalse(minimalEnumInfo.knowsDataTypeVariant());
+        assertNull(minimalEnumInfo.tryGetName());
+        assertNull(minimalEnumInfo.tryGetTypeVariant());
+        final HDF5DataTypeInformation defaultInfo =
+                reader.getDataSetInformation(enumDsName).getTypeInformation();
+        assertFalse(defaultInfo.knowsDataTypePath());
+        assertTrue(defaultInfo.knowsDataTypeVariant());
+        assertNull(defaultInfo.tryGetName());
+        assertEquals(HDF5DataTypeVariant.NONE, defaultInfo.tryGetTypeVariant());
+        final HDF5DataTypeInformation allInfo =
+                reader.object().getDataSetInformation(enumDsName, DataTypeInfoOptions.ALL)
+                        .getTypeInformation();
+        assertTrue(allInfo.knowsDataTypePath());
+        assertTrue(allInfo.knowsDataTypeVariant());
+        assertEquals(JavaEnum.class.getSimpleName(), allInfo.tryGetName());
+
+        final HDF5DataTypeInformation minimalDateInfo =
+                reader.object().getDataSetInformation(dateDsName, DataTypeInfoOptions.MINIMAL)
+                        .getTypeInformation();
+        assertFalse(minimalDateInfo.knowsDataTypePath());
+        assertFalse(minimalDateInfo.knowsDataTypeVariant());
+        assertNull(minimalDateInfo.tryGetName());
+        assertNull(minimalDateInfo.tryGetTypeVariant());
+
+        final HDF5DataTypeInformation defaultDateInfo =
+                reader.object().getDataSetInformation(dateDsName, DataTypeInfoOptions.DEFAULT)
+                        .getTypeInformation();
+        assertFalse(defaultDateInfo.knowsDataTypePath());
+        assertTrue(defaultDateInfo.knowsDataTypeVariant());
+        assertNull(defaultDateInfo.tryGetName());
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                defaultDateInfo.tryGetTypeVariant());
+
+        final HDF5DataTypeInformation allDateInfo =
+                reader.object().getDataSetInformation(dateDsName, DataTypeInfoOptions.ALL)
+                        .getTypeInformation();
+        assertTrue(allDateInfo.knowsDataTypePath());
+        assertTrue(allDateInfo.knowsDataTypeVariant());
+        assertNull(allDateInfo.tryGetName());
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                allDateInfo.tryGetTypeVariant());
+
+        reader.close();
+    }
+
+    enum JavaEnum
+    {
+        ONE, TWO, THREE
+    }
+
+    @Test
+    public void testJavaEnum()
+    {
+        final File file = new File(workingDirectory, "javaEnum.h5");
+        final String dsName = "/testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).keepDataSetsIfTheyExist().writer();
+        writer.enumeration().write(dsName, JavaEnum.THREE);
+        writer.enumeration().setAttr(dsName, "attr", JavaEnum.TWO);
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        assertEquals(JavaEnum.THREE, reader.enumeration().read(dsName, JavaEnum.class));
+        assertEquals(JavaEnum.TWO,
+                reader.enumeration().getAttr(dsName, "attr").getValue(JavaEnum.class));
+        final String valueStr = reader.readEnumAsString(dsName);
+        assertEquals("THREE", valueStr);
+        final HDF5EnumerationValue value = reader.enumeration().read(dsName);
+        assertEquals("THREE", value.getValue());
+        final String expectedDataTypePath =
+                HDF5Utils.createDataTypePath(HDF5Utils.ENUM_PREFIX, "",
+                        JavaEnum.class.getSimpleName());
+        assertEquals(expectedDataTypePath, reader.object().tryGetDataTypePath(value.getType()));
+        assertEquals(expectedDataTypePath, reader.object().tryGetDataTypePath(dsName));
+        final HDF5EnumerationType type = reader.enumeration().getDataSetType(dsName);
+        assertEquals(3, type.getValues().size());
+        assertEquals("ONE", type.getValues().get(0));
+        assertEquals("TWO", type.getValues().get(1));
+        assertEquals("THREE", type.getValues().get(2));
+        reader.close();
+    }
+
+    @Test
+    public void testEnum()
+    {
+        final File file = new File(workingDirectory, "enum.h5");
+        final String enumTypeName = "testEnumType";
+        final String dsName = "/testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).keepDataSetsIfTheyExist().writer();
+        HDF5EnumerationType type = writer.enumeration().getType(enumTypeName, new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        writer.enumeration().write(dsName, new HDF5EnumerationValue(type, "THREE"));
+        // The value order is wrong, but the check is disabled, so no exception is expected.
+        writer.enumeration().getType(enumTypeName, new String[]
+            { "THREE", "ONE", "TWO" }, false);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        type = reader.enumeration().getType(enumTypeName);
+        assertEquals(enumTypeName, type.tryGetName());
+        final HDF5DataTypeInformation typeInfo =
+                reader.object().getDataSetInformation(dsName, DataTypeInfoOptions.ALL)
+                        .getTypeInformation();
+        assertEquals(enumTypeName, typeInfo.tryGetName());
+        assertEquals(HDF5Utils.createDataTypePath(HDF5Utils.ENUM_PREFIX, "", enumTypeName),
+                typeInfo.tryGetDataTypePath());
+        final String valueStr = reader.readEnumAsString(dsName);
+        assertEquals("THREE", valueStr);
+        final HDF5EnumerationValue value = reader.enumeration().read(dsName);
+        assertEquals("THREE", value.getValue());
+        final String expectedDataTypePath =
+                HDF5Utils.createDataTypePath(HDF5Utils.ENUM_PREFIX, "", enumTypeName);
+        assertEquals(expectedDataTypePath, reader.object().tryGetDataTypePath(value.getType()));
+        assertEquals(expectedDataTypePath, reader.object().tryGetDataTypePath(dsName));
+        type = reader.enumeration().getDataSetType(dsName);
+        assertEquals("THREE", reader.enumeration().read(dsName, type).getValue());
+        reader.close();
+        final IHDF5Writer writer2 = HDF5FactoryProvider.get().open(file);
+        type = writer2.enumeration().getType(enumTypeName, new String[]
+            { "ONE", "TWO", "THREE" }, true);
+        assertEquals("THREE", writer2.enumeration().read(dsName, type).getValue());
+        writer2.close();
+    }
+
+    enum NumberEnum
+    {
+        ONE, TWO, THREE, FOUR, FIVE
+    }
+
+    @Test
+    public void testAnonymousEnum()
+    {
+        final File file = new File(workingDirectory, "anonymousEnum.h5");
+        final String dsName = "/testEnum";
+        final String dsName2 = "/testEnum2";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).keepDataSetsIfTheyExist().writer();
+        HDF5EnumerationType type = writer.enumeration().getAnonType(new String[]
+            { "ONE", "TWO", "THREE", "FOUR", "INFINITY" });
+        writer.enumeration().write(dsName, new HDF5EnumerationValue(type, "INFINITY"));
+        HDF5EnumerationType type2 = writer.enumeration().getAnonType(NumberEnum.class);
+        writer.enumeration().write(dsName2, new HDF5EnumerationValue(type2, NumberEnum.FIVE));
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertEquals("INFINITY", reader.readEnumAsString(dsName));
+        assertEquals("INFINITY", reader.enumeration().read(dsName).getValue());
+        assertEquals("FIVE", reader.readEnumAsString(dsName2));
+        assertEquals(NumberEnum.FIVE, reader.enumeration().read(dsName2).getValue(NumberEnum.class));
+        reader.close();
+    }
+
+    @Test
+    public void testEnum16()
+    {
+        final File file = new File(workingDirectory, "enum16bit.h5");
+        final String enumTypeName = "testEnumType16";
+        final String dsName = "/testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).keepDataSetsIfTheyExist().writer();
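+        // createEnum16Bit (a helper defined elsewhere in this test class) creates an
+        // enum type with enough values to require 16-bit storage.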
+        HDF5EnumerationType type = createEnum16Bit(writer, enumTypeName);
+        writer.enumeration().write(dsName, new HDF5EnumerationValue(type, "17"));
+        final String[] confusedValues = new String[type.getEnumType().getValueArray().length];
+        System.arraycopy(type.getEnumType().getValueArray(), 0, confusedValues, 1,
+                confusedValues.length - 1);
+        confusedValues[0] = "XXX";
+        // The value order is wrong, but the check is disabled.
+        writer.enumeration().getType(enumTypeName, confusedValues, false);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        type = reader.enumeration().getType(enumTypeName);
+        final String valueStr = reader.readEnumAsString(dsName);
+        assertEquals("17", valueStr);
+        final HDF5EnumerationValue value = reader.enumeration().read(dsName);
+        assertEquals("17", value.getValue());
+        type = reader.enumeration().getDataSetType(dsName);
+        assertEquals("17", reader.enumeration().read(dsName, type).getValue());
+        reader.close();
+        final IHDF5Writer writer2 = HDF5FactoryProvider.get().open(file);
+        type =
+                writer2.enumeration().getType(enumTypeName, type.getEnumType().getValueArray(),
+                        true);
+        assertEquals("17", writer2.enumeration().read(dsName, type).getValue());
+        // That was wrong, but we disabled the check, so no exception should be thrown.
+        writer2.close();
+    }
+
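+    // Re-registering a committed enum type under the same name with reordered literals
+    // and check == true must fail when the file is opened with keepDataSetsIfTheyExist().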
+    @Test(expectedExceptions = HDF5JavaException.class)
+    public void testConfusedEnum()
+    {
+        final File file = new File(workingDirectory, "confusedEnum.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        IHDF5Writer writer = HDF5Factory.open(file);
+        HDF5EnumerationType type = writer.enumeration().getType("testEnum", new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        writer.enumeration().write("/testEnum", new HDF5EnumerationValue(type, 2));
+        writer.close();
+        try
+        {
+            writer = HDF5Factory.configure(file).keepDataSetsIfTheyExist().writer();
+            writer.enumeration().getType("testEnum", new String[]
+                { "THREE", "ONE", "TWO" }, true);
+        } finally
+        {
+            writer.close();
+        }
+    }
+
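+    // Without keepDataSetsIfTheyExist(), re-registering with check == true replaces the
+    // committed type: the old definition is renamed to "testEnum__REPLACED_1" and the
+    // original name is reused.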
+    @Test
+    public void testReplaceConfusedEnum()
+    {
+        final File file = new File(workingDirectory, "replaceConfusedEnum.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        IHDF5Writer writer = HDF5Factory.open(file);
+        HDF5EnumerationType type = writer.enumeration().getType("testEnum", new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        writer.enumeration().write("/testEnum", new HDF5EnumerationValue(type, 2));
+        writer.close();
+        writer = HDF5Factory.open(file);
+        final HDF5EnumerationType type2 = writer.enumeration().getType("testEnum", new String[]
+            { "THREE", "ONE", "TWO" }, true);
+        assertEquals("testEnum", type2.getName());
+        assertEquals("testEnum__REPLACED_1", writer.enumeration().getDataSetType("/testEnum")
+                .getName());
+        writer.close();
+    }
+
+    @Test
+    public void testEnumArray()
+    {
+        final File file = new File(workingDirectory, "enumArray.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationType enumType = writer.enumeration().getType(enumTypeName, new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        HDF5EnumerationValueArray arrayWritten =
+                new HDF5EnumerationValueArray(enumType, new String[]
+                    { "TWO", "ONE", "THREE" });
+        writer.enumeration().writeArray("/testEnum", arrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5EnumerationValueArray arrayRead = reader.enumeration().readArray("/testEnum");
+        enumType = reader.enumeration().getDataSetType("/testEnum");
+        final HDF5EnumerationValueArray arrayRead2 =
+                reader.enumeration().readArray("/testEnum", enumType);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray("/testEnum").toStringArray();
+        assertEquals(arrayWritten.getLength(), stringArrayRead.length);
+        assertEquals(arrayWritten.getLength(), arrayRead.getLength());
+        assertEquals(arrayWritten.getLength(), arrayRead2.getLength());
+        for (int i = 0; i < stringArrayRead.length; ++i)
+        {
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayRead.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayRead2.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), stringArrayRead[i]);
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testEnumMDArray()
+    {
+        final File file = new File(workingDirectory, "enumMDArray.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationValueMDArray arrayWritten =
+                writer.enumeration().newMDArray(
+                        enumTypeName,
+                        new String[]
+                            { "ONE", "TWO", "THREE" },
+                        new MDArray<String>(new String[]
+                            { "TWO", "ONE", "THREE", "TWO", "ONE", "THREE", "TWO", "ONE", "THREE",
+                                    "TWO", "ONE", "THREE", "TWO", "ONE", "THREE", "TWO", "ONE",
+                                    "THREE", "TWO", "ONE", "THREE", "TWO", "ONE", "THREE" },
+                                new int[]
+                                    { 2, 3, 4 }));
+        writer.enumeration().writeMDArray("/testEnum", arrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5EnumerationValueMDArray arrayRead = reader.enumeration().readMDArray("/testEnum");
+        final HDF5EnumerationType enumType = reader.enumeration().getDataSetType("/testEnum");
+        final HDF5EnumerationValueMDArray arrayRead2 =
+                reader.enumeration().readMDArray("/testEnum", enumType);
+        final MDArray<String> stringArrayRead = arrayRead2.toStringArray();
+        assertTrue(Arrays.equals(arrayWritten.dimensions(), stringArrayRead.dimensions()));
+        assertTrue(Arrays.equals(arrayWritten.dimensions(), arrayRead.dimensions()));
+        assertTrue(Arrays.equals(arrayWritten.dimensions(), arrayRead2.dimensions()));
+        for (int i = 0; i < stringArrayRead.size(); ++i)
+        {
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayRead.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayRead2.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), stringArrayRead.get(i));
+        }
+        reader.close();
+    }
+
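+    // Writes a 3D enum data set block by block; the total extent grows to cover the
+    // largest block offset in each dimension ({ 4, 6, 4 } here).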
+    @Test
+    public void testEnumMDArrayBlockWise()
+    {
+        final File file = new File(workingDirectory, "enumMDArrayBlockWise.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationType enumType = writer.enumeration().getType(enumTypeName, new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        HDF5EnumerationValueMDArray arrayBlockWritten1 =
+                new HDF5EnumerationValueMDArray(enumType, new MDArray<String>(new String[]
+                    { "TWO", "ONE", "THREE", "THREE", "TWO", "ONE", }, new int[]
+                    { 2, 3, 1 }));
+        HDF5EnumerationValueMDArray arrayBlockWritten2 =
+                new HDF5EnumerationValueMDArray(enumType, new MDArray<String>(new String[]
+                    { "ONE", "TWO", "THREE", "THREE", "TWO", "ONE", }, new int[]
+                    { 2, 3, 1 }));
+        HDF5EnumerationValueMDArray arrayBlockWritten3 =
+                new HDF5EnumerationValueMDArray(enumType, new MDArray<String>(new String[]
+                    { "THREE", "TWO", "ONE", "ONE", "TWO", "THREE", }, new int[]
+                    { 2, 3, 1 }));
+        writer.enumeration().createMDArray("/testEnum", enumType, new int[]
+            { 2, 3, 1 });
+        for (int i = 0; i < 4; ++i)
+        {
+            writer.enumeration().writeMDArrayBlock("/testEnum", arrayBlockWritten1, new long[]
+                { 0, 0, i });
+            writer.enumeration().writeMDArrayBlock("/testEnum", arrayBlockWritten2, new long[]
+                { 1, 0, i });
+            writer.enumeration().writeMDArrayBlock("/testEnum", arrayBlockWritten3, new long[]
+                { 0, 1, i });
+        }
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertTrue(Arrays.equals(new long[]
+            { 4, 6, 4 }, reader.getDataSetInformation("/testEnum").getDimensions()));
+        for (int i = 0; i < 4; ++i)
+        {
+            assertEquals(arrayBlockWritten1,
+                    reader.enumeration().readMDArrayBlock("/testEnum", new int[]
+                        { 2, 3, 1 }, new long[]
+                        { 0, 0, i }));
+            assertEquals(arrayBlockWritten2,
+                    reader.enumeration().readMDArrayBlock("/testEnum", new int[]
+                        { 2, 3, 1 }, new long[]
+                        { 1, 0, i }));
+            assertEquals(arrayBlockWritten3,
+                    reader.enumeration().readMDArrayBlock("/testEnum", new int[]
+                        { 2, 3, 1 }, new long[]
+                        { 0, 1, i }));
+        }
+
+        enumType = reader.enumeration().getDataSetType("/testEnum");
+        for (int i = 0; i < 4; ++i)
+        {
+            assertEquals(arrayBlockWritten1,
+                    reader.enumeration().readMDArrayBlock("/testEnum", enumType, new int[]
+                        { 2, 3, 1 }, new long[]
+                        { 0, 0, i }));
+            assertEquals(arrayBlockWritten2,
+                    reader.enumeration().readMDArrayBlock("/testEnum", enumType, new int[]
+                        { 2, 3, 1 }, new long[]
+                        { 1, 0, i }));
+            assertEquals(arrayBlockWritten3,
+                    reader.enumeration().readMDArrayBlock("/testEnum", enumType, new int[]
+                        { 2, 3, 1 }, new long[]
+                        { 0, 1, i }));
+        }
+        for (HDF5MDEnumBlock block : reader.enumeration().getMDArrayBlocks("/testEnum", enumType))
+        {
+            assertTrue(Long.toString(block.getIndex()[2]),
+                    block.getIndex()[2] >= 0 && block.getIndex()[2] < 4);
+            if (block.getIndex()[0] == 0 && block.getIndex()[1] == 0)
+            {
+                assertEquals(arrayBlockWritten1, block.getData());
+            } else if (block.getIndex()[0] == 0 && block.getIndex()[1] == 1)
+            {
+                assertEquals(arrayBlockWritten3, block.getData());
+            } else if (block.getIndex()[0] == 1 && block.getIndex()[1] == 0)
+            {
+                assertEquals(arrayBlockWritten2, block.getData());
+            } else if (block.getIndex()[0] == 1 && block.getIndex()[1] == 1)
+            {
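+                // The block at index { 1, 1, * } was never written; it reads back as
+                // all zeros (ordinal 0).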
+                assertTrue(Arrays.equals(new int[]
+                    { 2, 3, 1 }, block.getData().dimensions()));
+                assertTrue(Arrays.equals(new byte[6], ((MDByteArray) block.getData()
+                        .getOrdinalValues()).getAsFlatArray()));
+            } else
+            {
+                fail("Unexpected index " + Arrays.toString(block.getIndex()));
+            }
+        }
+        reader.close();
+    }
+
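+    // Enum arrays can be written directly from Java enum constants; newArray() derives
+    // the HDF5 enum type from the Java class. Blocks are written both by block index
+    // and by explicit offset.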
+    @Test
+    public void testJavaEnumArray()
+    {
+        final File file = new File(workingDirectory, "javaEnumArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final JavaEnum[] arrayWritten = new JavaEnum[]
+            { JavaEnum.TWO, JavaEnum.ONE, JavaEnum.THREE, JavaEnum.ONE };
+        final JavaEnum[] arrayBlockTwoWritten = new JavaEnum[]
+            { JavaEnum.THREE, JavaEnum.ONE, JavaEnum.TWO, JavaEnum.THREE };
+        writer.enumeration().writeArray("/testEnum", writer.enumeration().newArray(arrayWritten));
+        final HDF5EnumerationType type =
+                writer.enumeration().createArray("/testEnumBlockwise",
+                        writer.enumeration().getType(JavaEnum.class), 16);
+        writer.enumeration().writeArrayBlock("/testEnumBlockwise",
+                new HDF5EnumerationValueArray(type, arrayWritten), 0);
+        writer.enumeration().writeArrayBlock("/testEnumBlockwise",
+                writer.enumeration().newArray(arrayBlockTwoWritten), 1);
+        writer.enumeration().writeArrayBlockWithOffset("/testEnumBlockwise",
+                new HDF5EnumerationValueArray(type, arrayBlockTwoWritten),
+                arrayBlockTwoWritten.length, 8);
+        writer.enumeration().writeArrayBlockWithOffset("/testEnumBlockwise",
+                writer.enumeration().newArray(arrayWritten), arrayWritten.length, 12);
+        final JavaEnum[] attributeArrayWritten = new JavaEnum[]
+            { JavaEnum.THREE, JavaEnum.ONE, JavaEnum.TWO };
+        writer.enumeration().setArrayAttr("/testEnum", "attr",
+                writer.enumeration().newArray(attributeArrayWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final JavaEnum[] arrayRead =
+                reader.enumeration().readArray("/testEnum").toEnumArray(JavaEnum.class);
+        final JavaEnum[] attributeArrayRead =
+                reader.enumeration().getArrayAttr("/testEnum", "attr").toEnumArray(JavaEnum.class);
+        final JavaEnum[] arrayBlockRead0 =
+                reader.enumeration().readArrayBlock("/testEnumBlockwise", 4, 0)
+                        .toEnumArray(JavaEnum.class);
+        final JavaEnum[] arrayBlockRead1 =
+                reader.enumeration().readArrayBlock("/testEnumBlockwise", 4, 1)
+                        .toEnumArray(JavaEnum.class);
+        final JavaEnum[] arrayBlockRead2 =
+                reader.enumeration().readArrayBlock("/testEnumBlockwise", 4, 2)
+                        .toEnumArray(JavaEnum.class);
+        final JavaEnum[] arrayBlockRead3 =
+                reader.enumeration().readArrayBlock("/testEnumBlockwise", 4, 3)
+                        .toEnumArray(JavaEnum.class);
+        reader.close();
+        assertEquals(arrayWritten.length, arrayRead.length);
+        for (int i = 0; i < arrayWritten.length; ++i)
+        {
+            assertEquals(arrayWritten[i], arrayRead[i]);
+        }
+        assertEquals(attributeArrayWritten.length, attributeArrayRead.length);
+        for (int i = 0; i < attributeArrayWritten.length; ++i)
+        {
+            assertEquals(attributeArrayWritten[i], attributeArrayRead[i]);
+        }
+        assertEquals(arrayWritten.length, arrayBlockRead0.length);
+        assertEquals(arrayWritten.length, arrayBlockRead1.length);
+        assertEquals(arrayWritten.length, arrayBlockRead2.length);
+        assertEquals(arrayWritten.length, arrayBlockRead3.length);
+        for (int i = 0; i < arrayWritten.length; ++i)
+        {
+            assertEquals(arrayWritten[i], arrayBlockRead0[i]);
+            assertEquals(arrayBlockTwoWritten[i], arrayBlockRead1[i]);
+            assertEquals(arrayBlockTwoWritten[i], arrayBlockRead2[i]);
+            assertEquals(arrayWritten[i], arrayBlockRead3[i]);
+        }
+    }
+
+    @Test
+    public void testEnumArrayBlock()
+    {
+        final File file = new File(workingDirectory, "enumArrayBlock.h5");
+        final String enumTypeName = "testEnum";
+        final int chunkSize = 4;
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationType enumType = writer.enumeration().getType(enumTypeName, new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        writer.enumeration().createArray("/testEnum", enumType, chunkSize);
+        HDF5EnumerationValueArray arrayWritten =
+                new HDF5EnumerationValueArray(enumType, new String[]
+                    { "TWO", "ONE", "THREE", "TWO" });
+        writer.enumeration().writeArrayBlock("/testEnum", arrayWritten, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5EnumerationValueArray arrayReadBlock0 =
+                reader.enumeration().readArrayBlock(enumTypeName, chunkSize, 0);
+        enumType = reader.enumeration().getDataSetType(enumTypeName);
+        final HDF5EnumerationValueArray arrayReadBlock1 =
+                reader.enumeration().readArrayBlock(enumTypeName, enumType, chunkSize, 1);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray(enumTypeName).toStringArray();
+        assertEquals(arrayWritten.getLength() * 2, stringArrayRead.length);
+        assertEquals(arrayWritten.getLength(), arrayReadBlock0.getLength());
+        assertEquals(arrayWritten.getLength(), arrayReadBlock1.getLength());
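+        // Block 0 was never written, so it reads back as ordinal 0, i.e. "ONE".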
+        for (int i = 0; i < arrayReadBlock0.getLength(); ++i)
+        {
+            assertEquals("Index " + i, "ONE", arrayReadBlock0.getValue(i));
+            assertEquals("Index " + i, "ONE", stringArrayRead[i]);
+        }
+        for (int i = 0; i < arrayReadBlock0.getLength(); ++i)
+        {
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayReadBlock1.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), stringArrayRead[chunkSize + i]);
+        }
+        final HDF5EnumerationValueArray[] dataBlocksExpected = new HDF5EnumerationValueArray[]
+            { arrayReadBlock0, arrayReadBlock1 };
+        int blockIndex = 0;
+        for (HDF5DataBlock<HDF5EnumerationValueArray> block : reader.enumeration().getArrayBlocks(
+                enumTypeName, enumType))
+        {
+            final HDF5EnumerationValueArray blockExpected = dataBlocksExpected[blockIndex++];
+            final HDF5EnumerationValueArray blockRead = block.getData();
+            assertEquals(chunkSize, blockRead.getLength());
+            for (int i = 0; i < blockExpected.getLength(); ++i)
+            {
+                assertEquals("Index " + i, blockExpected.getValue(i), blockRead.getValue(i));
+            }
+        }
+        reader.close();
+    }
+
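+    // Same scenario as testEnumArrayBlock, but the data set is created with
+    // INT_AUTO_SCALING (the HDF5 scale-offset filter applied to the ordinals).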
+    @Test
+    public void testEnumArrayBlockScalingCompression()
+    {
+        final File file = new File(workingDirectory, "enumArrayBlockScalingCompression.h5");
+        final String enumTypeName = "testEnum";
+        final int chunkSize = 4;
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationType enumType = writer.enumeration().getType(enumTypeName, new String[]
+            { "ONE", "TWO", "THREE" }, false);
+        writer.enumeration().createArray("/testEnum", enumType, 0, chunkSize,
+                HDF5IntStorageFeatures.INT_AUTO_SCALING);
+        HDF5EnumerationValueArray arrayWritten =
+                new HDF5EnumerationValueArray(enumType, new String[]
+                    { "TWO", "ONE", "THREE", "ONE" });
+        writer.enumeration().writeArrayBlock("/testEnum", arrayWritten, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5EnumerationValueArray arrayReadBlock0 =
+                reader.enumeration().readArrayBlock(enumTypeName, chunkSize, 0);
+        enumType = reader.enumeration().getDataSetType(enumTypeName);
+        final HDF5EnumerationValueArray arrayReadBlock1 =
+                reader.enumeration().readArrayBlock(enumTypeName, enumType, chunkSize, 1);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray(enumTypeName).toStringArray();
+        assertEquals(arrayWritten.getLength() * 2, stringArrayRead.length);
+        assertEquals(arrayWritten.getLength(), arrayReadBlock0.getLength());
+        assertEquals(arrayWritten.getLength(), arrayReadBlock1.getLength());
+        for (int i = 0; i < arrayReadBlock0.getLength(); ++i)
+        {
+            assertEquals("Index " + i, "ONE", arrayReadBlock0.getValue(i));
+            assertEquals("Index " + i, "ONE", stringArrayRead[i]);
+        }
+        for (int i = 0; i < arrayReadBlock0.getLength(); ++i)
+        {
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayReadBlock1.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), stringArrayRead[chunkSize + i]);
+        }
+        final HDF5EnumerationValueArray[] dataBlocksExpected = new HDF5EnumerationValueArray[]
+            { arrayReadBlock0, arrayReadBlock1 };
+        int blockIndex = 0;
+        for (HDF5DataBlock<HDF5EnumerationValueArray> block : reader.enumeration().getArrayBlocks(
+                enumTypeName, enumType))
+        {
+            final HDF5EnumerationValueArray blockExpected = dataBlocksExpected[blockIndex++];
+            final HDF5EnumerationValueArray blockRead = block.getData();
+            assertEquals(chunkSize, blockRead.getLength());
+            for (int i = 0; i < blockExpected.getLength(); ++i)
+            {
+                assertEquals("Index " + i, blockExpected.getValue(i), blockRead.getValue(i));
+            }
+        }
+        reader.close();
+    }
+
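+    // Enum value arrays can also be constructed from int ordinals; with the 16-bit type
+    // from createEnum16Bit(), the literal of ordinal i is Integer.toString(i).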
+    @Test
+    public void testEnumArray16BitFromIntArray()
+    {
+        final File file = new File(workingDirectory, "enumArray16BitFromIntArray.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType = createEnum16Bit(writer, enumTypeName);
+        final int[] arrayWritten = new int[]
+            { 8, 16, 722, 913, 333 };
+        writer.enumeration().writeArray("/testEnum",
+                new HDF5EnumerationValueArray(enumType, arrayWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray("/testEnum").toStringArray();
+        assertEquals(arrayWritten.length, stringArrayRead.length);
+        for (int i = 0; i < stringArrayRead.length; ++i)
+        {
+            assertEquals("Index " + i, enumType.getValues().get(arrayWritten[i]),
+                    stringArrayRead[i]);
+        }
+        final HDF5EnumerationValueArray arrayRead = reader.enumeration().readArray("/testEnum");
+        assertEquals(arrayWritten.length, arrayRead.getLength());
+        for (int i = 0; i < arrayRead.getLength(); ++i)
+        {
+            assertEquals("Index " + i, enumType.getValues().get(arrayWritten[i]),
+                    arrayRead.getValue(i));
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testEnumArray16BitFromIntArrayScaled()
+    {
+        final File file = new File(workingDirectory, "testEnumArray16BitFromIntArrayScaled.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType = createEnum16Bit(writer, enumTypeName);
+        final int[] arrayWritten = new int[]
+            { 8, 16, 722, 913, 333 };
+        writer.enumeration().writeArray("/testEnum",
+                new HDF5EnumerationValueArray(enumType, arrayWritten),
+                HDF5IntStorageFeatures.INT_AUTO_SCALING);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray("/testEnum").toStringArray();
+        assertEquals(arrayWritten.length, stringArrayRead.length);
+        for (int i = 0; i < stringArrayRead.length; ++i)
+        {
+            assertEquals("Index " + i, enumType.getValues().get(arrayWritten[i]),
+                    stringArrayRead[i]);
+        }
+        final HDF5EnumerationValueArray arrayRead = reader.enumeration().readArray("/testEnum");
+        assertEquals(arrayWritten.length, arrayRead.getLength());
+        for (int i = 0; i < arrayRead.getLength(); ++i)
+        {
+            assertEquals("Index " + i, enumType.getValues().get(arrayWritten[i]),
+                    arrayRead.getValue(i));
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testEnumArray16BitFromIntArrayLarge()
+    {
+        final File file = new File(workingDirectory, "enumArray16BitFromIntArrayLarge.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType = createEnum16Bit(writer, enumTypeName);
+        final int[] arrayWritten = new int[100];
+        for (int i = 0; i < arrayWritten.length; ++i)
+        {
+            arrayWritten[i] = 10 * i;
+        }
+        writer.enumeration().writeArray("/testEnum",
+                new HDF5EnumerationValueArray(enumType, arrayWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray("/testEnum").toStringArray();
+        assertEquals(arrayWritten.length, stringArrayRead.length);
+        for (int i = 0; i < stringArrayRead.length; ++i)
+        {
+            assertEquals("Index " + i, enumType.getValues().get(arrayWritten[i]),
+                    stringArrayRead[i]);
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testEnumArrayBlock16Bit()
+    {
+        final File file = new File(workingDirectory, "enumArrayBlock16Bit.h5");
+        final String enumTypeName = "testEnum";
+        final int chunkSize = 4;
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationType enumType = createEnum16Bit(writer, enumTypeName);
+        writer.enumeration().createArray("/testEnum", enumType, chunkSize);
+        final HDF5EnumerationValueArray arrayWritten =
+                new HDF5EnumerationValueArray(enumType, new int[]
+                    { 8, 16, 722, 913 });
+        writer.enumeration().writeArrayBlock("/testEnum", arrayWritten, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5EnumerationValueArray arrayReadBlock0 =
+                reader.enumeration().readArrayBlock(enumTypeName, chunkSize, 0);
+        enumType = reader.enumeration().getDataSetType(enumTypeName);
+        final HDF5EnumerationValueArray arrayReadBlock1 =
+                reader.enumeration().readArrayBlock(enumTypeName, enumType, chunkSize, 1);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray(enumTypeName).toStringArray();
+        assertEquals(arrayWritten.getLength() * 2, stringArrayRead.length);
+        assertEquals(arrayWritten.getLength(), arrayReadBlock0.getLength());
+        assertEquals(arrayWritten.getLength(), arrayReadBlock1.getLength());
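+        // Block 0 was never written, so it reads back as ordinal 0, i.e. the literal "0".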
+        for (int i = 0; i < arrayReadBlock0.getLength(); ++i)
+        {
+            assertEquals("Index " + i, "0", arrayReadBlock0.getValue(i));
+            assertEquals("Index " + i, "0", stringArrayRead[i]);
+        }
+        for (int i = 0; i < arrayReadBlock0.getLength(); ++i)
+        {
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayReadBlock1.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), stringArrayRead[chunkSize + i]);
+        }
+        final HDF5EnumerationValueArray[] dataBlocksExpected = new HDF5EnumerationValueArray[]
+            { arrayReadBlock0, arrayReadBlock1 };
+        int blockIndex = 0;
+        for (HDF5DataBlock<HDF5EnumerationValueArray> block : reader.enumeration().getArrayBlocks(
+                enumTypeName, enumType))
+        {
+            final HDF5EnumerationValueArray blockExpected = dataBlocksExpected[blockIndex++];
+            final HDF5EnumerationValueArray blockRead = block.getData();
+            assertEquals(chunkSize, blockRead.getLength());
+            for (int i = 0; i < blockExpected.getLength(); ++i)
+            {
+                assertEquals("Index " + i, blockExpected.getValue(i), blockRead.getValue(i));
+            }
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testEnumArrayScaleCompression()
+    {
+        final File file = new File(workingDirectory, "enumArrayScaleCompression.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5EnumerationType enumType = writer.enumeration().getType(enumTypeName, new String[]
+            { "A", "C", "G", "T" }, false);
+        final Random rng = new Random();
+        final String[] arrayWrittenString = new String[100000];
+        for (int i = 0; i < arrayWrittenString.length; ++i)
+        {
+            arrayWrittenString[i] = enumType.getValues().get(rng.nextInt(4));
+        }
+        final HDF5EnumerationValueArray arrayWritten =
+                new HDF5EnumerationValueArray(enumType, arrayWrittenString);
+        writer.enumeration().writeArray("/testEnum", arrayWritten, INT_AUTO_SCALING);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertEquals(HDF5DataClass.ENUM, reader.object().getDataSetInformation("/testEnum")
+                .getTypeInformation().getDataClass());
+        final HDF5EnumerationValueArray arrayRead = reader.enumeration().readArray("/testEnum");
+        enumType = reader.enumeration().getDataSetType("/testEnum");
+        final HDF5EnumerationValueArray arrayRead2 =
+                reader.enumeration().readArray("/testEnum", enumType);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray("/testEnum").toStringArray();
+        assertEquals(arrayWritten.getLength(), stringArrayRead.length);
+        assertEquals(arrayWritten.getLength(), arrayRead.getLength());
+        assertEquals(arrayWritten.getLength(), arrayRead2.getLength());
+        for (int i = 0; i < stringArrayRead.length; ++i)
+        {
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayRead.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), arrayRead2.getValue(i));
+            assertEquals("Index " + i, arrayWritten.getValue(i), stringArrayRead[i]);
+        }
+        reader.close();
+
+        // Scale-offset is an HDF5 1.8 feature, so this shouldn't work in strict
+        // HDF5 1.6 mode.
+        final File file2 = new File(workingDirectory, "scaleoffsetfilterenumfailed.h5");
+        file2.delete();
+        assertFalse(file2.exists());
+        file2.deleteOnExit();
+        final IHDF5Writer writer2 =
+                HDF5FactoryProvider.get().configure(file2).fileFormat(FileFormat.STRICTLY_1_6)
+                        .writer();
+        HDF5EnumerationType enumType2 = writer2.enumeration().getType(enumTypeName, new String[]
+            { "A", "C", "G", "T" }, false);
+        final HDF5EnumerationValueArray arrayWritten2 =
+                new HDF5EnumerationValueArray(enumType2, arrayWrittenString);
+        try
+        {
+            writer2.enumeration().writeArray("/testEnum", arrayWritten2, INT_AUTO_SCALING);
+            fail("Usage of scaling compression in strict HDF5 1.6 mode not detected");
+        } catch (IllegalStateException ex)
+        {
+            assertTrue(ex.getMessage().indexOf("not allowed") >= 0);
+        }
+        writer2.close();
+    }
+
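+    // Opaque data sets store raw bytes under a tag. The tag must be readable back, a
+    // plain byte array data set must have no tag (null), and both data sets must read
+    // back as the original byte array.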
+    @Test
+    public void testOpaqueType()
+    {
+        final File file = new File(workingDirectory, "opaqueType.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final String opaqueDataSetName = "/opaque/ds";
+        final String byteArrayDataSetName = "/bytearr/ds";
+        final String opaqueTag = "my opaque type";
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final byte[] byteArrayWritten = new byte[]
+            { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
+        writer.int8().writeArray(byteArrayDataSetName, byteArrayWritten);
+        writer.opaque().writeArray(opaqueDataSetName, opaqueTag, byteArrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        HDF5DataSetInformation info = reader.getDataSetInformation(byteArrayDataSetName);
+        assertEquals(HDF5DataClass.INTEGER, info.getTypeInformation().getDataClass());
+        assertChunkSizes(info, byteArrayWritten.length);
+        info = reader.getDataSetInformation(opaqueDataSetName);
+        assertEquals(HDF5DataClass.OPAQUE, info.getTypeInformation().getDataClass());
+        assertEquals(opaqueTag, info.getTypeInformation().tryGetOpaqueTag());
+        assertChunkSizes(info, byteArrayWritten.length);
+        assertEquals(opaqueTag, reader.opaque().tryGetOpaqueTag(opaqueDataSetName));
+        assertEquals(opaqueTag, reader.opaque().tryGetOpaqueType(opaqueDataSetName).getTag());
+        assertNull(reader.opaque().tryGetOpaqueTag(byteArrayDataSetName));
+        assertNull(reader.opaque().tryGetOpaqueType(byteArrayDataSetName));
+        final byte[] byteArrayRead = reader.readAsByteArray(byteArrayDataSetName);
+        assertTrue(Arrays.equals(byteArrayWritten, byteArrayRead));
+        final byte[] byteArrayReadOpaque = reader.readAsByteArray(opaqueDataSetName);
+        assertTrue(Arrays.equals(byteArrayWritten, byteArrayReadOpaque));
+        reader.close();
+    }
+
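+    // Helper: registers an enum type with 1024 literals ("0" .. "1023"), forcing a
+    // 16-bit base type.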
+    private HDF5EnumerationType createEnum16Bit(final IHDF5Writer writer, final String enumTypeName)
+    {
+        final String[] enumValues = new String[1024];
+        for (int i = 0; i < enumValues.length; ++i)
+        {
+            enumValues[i] = Integer.toString(i);
+        }
+        final HDF5EnumerationType enumType =
+                writer.enumeration().getType(enumTypeName, enumValues, false);
+        return enumType;
+    }
+
+    @Test
+    public void testEnumArrayFromIntArray()
+    {
+        final File file = new File(workingDirectory, "enumArrayFromIntArray.h5");
+        final String enumTypeName = "testEnum";
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType =
+                writer.enumeration().getType(enumTypeName, new String[]
+                    { "ONE", "TWO", "THREE" }, false);
+        final int[] arrayWritten =
+                new int[]
+                    { enumType.tryGetIndexForValue("TWO").byteValue(),
+                            enumType.tryGetIndexForValue("ONE").byteValue(),
+                            enumType.tryGetIndexForValue("THREE").byteValue() };
+        writer.enumeration().writeArray("/testEnum",
+                new HDF5EnumerationValueArray(enumType, arrayWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final String[] stringArrayRead =
+                reader.enumeration().readArray("/testEnum").toStringArray();
+        assertEquals(arrayWritten.length, stringArrayRead.length);
+        for (int i = 0; i < stringArrayRead.length; ++i)
+        {
+            assertEquals("Index " + i, enumType.getValues().get(arrayWritten[i]),
+                    stringArrayRead[i]);
+        }
+        reader.close();
+    }
+
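+    // Compound-mapping fixture covering scalar primitives (some unsigned), a String, an
+    // enum value, primitive arrays, an MDIntArray and a char array, with explicit
+    // lengths and dimensions given in getMapping().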
+    static class Record
+    {
+        int a;
+
+        float b;
+
+        long l;
+
+        double c;
+
+        @CompoundElement(unsigned = true)
+        short d;
+
+        boolean e;
+
+        String f;
+
+        HDF5EnumerationValue g;
+
+        int[] ar;
+
+        float[] br;
+
+        long[] lr;
+
+        double[] cr;
+
+        short[] dr;
+
+        @CompoundElement(unsigned = true)
+        byte[] er;
+
+        MDIntArray fr;
+
+        char[] gr;
+
+        Record(int a, float b, long l, double c, short d, boolean e, String f,
+                HDF5EnumerationValue g, int[] ar, float[] br, long[] lr, double[] cr, short[] dr,
+                byte[] er, MDIntArray fr, char[] gr)
+        {
+            this.a = a;
+            this.b = b;
+            this.l = l;
+            this.c = c;
+            this.d = d;
+            this.e = e;
+            this.f = f;
+            this.g = g;
+            this.ar = ar;
+            this.br = br;
+            this.lr = lr;
+            this.cr = cr;
+            this.dr = dr;
+            this.er = er;
+            this.fr = fr;
+            this.gr = gr;
+        }
+
+        Record()
+        {
+        }
+
+        static HDF5CompoundMemberInformation[] getMemberInfo(HDF5EnumerationType enumType)
+        {
+            return HDF5CompoundMemberInformation.create(Record.class, "",
+                    getShuffledMapping(enumType));
+        }
+
+        static HDF5CompoundType<Record> getHDF5Type(IHDF5Reader reader)
+        {
+            final HDF5EnumerationType enumType =
+                    reader.enumeration().getType("someEnumType", new String[]
+                        { "1", "Two", "THREE" });
+            return reader.compound().getType(null, Record.class, getMapping(enumType));
+        }
+
+        private static HDF5CompoundMemberMapping[] getMapping(HDF5EnumerationType enumType)
+        {
+            return new HDF5CompoundMemberMapping[]
+                { mapping("a"), mapping("b"), mapping("l"), mapping("c"), mapping("d").unsigned(),
+                        mapping("e"), mapping("f").length(3), mapping("g").enumType(enumType),
+                        mapping("ar").length(3), mapping("br").length(2), mapping("lr").length(3),
+                        mapping("cr").length(1), mapping("dr").length(2),
+                        mapping("er").length(4).unsigned(), mapping("fr").dimensions(2, 2),
+                        mapping("gr").length(5) };
+        }
+
+        private static HDF5CompoundMemberMapping[] getShuffledMapping(HDF5EnumerationType enumType)
+        {
+            return new HDF5CompoundMemberMapping[]
+                { mapping("er").length(4), mapping("e"), mapping("b"), mapping("br").length(2),
+                        mapping("g").enumType(enumType), mapping("lr").length(3),
+                        mapping("gr").length(5), mapping("c"), mapping("ar").length(3),
+                        mapping("a"), mapping("d"), mapping("cr").length(1),
+                        mapping("f").length(3), mapping("fr").dimensions(2, 2),
+                        mapping("dr").length(2), mapping("l") };
+        }
+
+        //
+        // Object
+        //
+
+        @Override
+        public int hashCode()
+        {
+            final HashCodeBuilder builder = new HashCodeBuilder();
+            builder.append(a);
+            builder.append(b);
+            builder.append(l);
+            builder.append(c);
+            builder.append(d);
+            builder.append(e);
+            builder.append(f);
+            builder.append(g);
+            builder.append(ar);
+            builder.append(br);
+            builder.append(lr);
+            builder.append(cr);
+            builder.append(dr);
+            builder.append(er);
+            builder.append(fr);
+            return builder.toHashCode();
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (obj == null || obj instanceof Record == false)
+            {
+                return false;
+            }
+            final Record that = (Record) obj;
+            final EqualsBuilder builder = new EqualsBuilder();
+            builder.append(a, that.a);
+            builder.append(b, that.b);
+            builder.append(l, that.l);
+            builder.append(c, that.c);
+            builder.append(d, that.d);
+            builder.append(e, that.e);
+            builder.append(f, that.f);
+            builder.append(g, that.g);
+            builder.append(ar, that.ar);
+            builder.append(br, that.br);
+            builder.append(lr, that.lr);
+            builder.append(cr, that.cr);
+            builder.append(dr, that.dr);
+            builder.append(er, that.er);
+            builder.append(fr, that.fr);
+            return builder.isEquals();
+        }
+
+        @Override
+        public String toString()
+        {
+            final ToStringBuilder builder = new ToStringBuilder(this);
+            builder.append(a);
+            builder.append(b);
+            builder.append(l);
+            builder.append(c);
+            builder.append(d);
+            builder.append(e);
+            builder.append(f);
+            builder.append(g);
+            builder.append(ar);
+            builder.append(br);
+            builder.append(lr);
+            builder.append(cr);
+            builder.append(dr);
+            builder.append(er);
+            builder.append(fr);
+            return builder.toString();
+        }
+
+    }
+
+    @Test
+    public void testCompoundAttribute()
+    {
+        final File file = new File(workingDirectory, "compoundAttribute.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final SimpleInheretingRecord recordWritten =
+                new SimpleInheretingRecord(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 } });
+        writer.compound().setAttr("/", "cpd", recordWritten);
+        final SimpleInheretingRecord[] recordArrWritten =
+                new SimpleInheretingRecord[]
+                    {
+                            new SimpleInheretingRecord(3.14159f, 100, (short) 1, "abc",
+                                    new long[][]
+                                        {
+                                            { 10, 20, 30 },
+                                            { 40, 50, 60 } }),
+                            new SimpleInheretingRecord(3.14159f, 1000, (short) 2, "def",
+                                    new long[][]
+                                        {
+                                            { 70, 80, 90 },
+                                            { 100, 110, 120 } }), };
+        writer.compound().setArrayAttr("/", "cpdArray", recordArrWritten);
+        // The second, identical call overwrites the attribute written by the first.
+        writer.compound().setArrayAttr("/", "cpdArray", recordArrWritten);
+        final MDArray<SimpleInheretingRecord> recordMDArrWritten =
+                new MDArray<SimpleInheretingRecord>(new SimpleInheretingRecord[]
+                    {
+                            new SimpleInheretingRecord(3.14159f, 100, (short) 1, "abc",
+                                    new long[][]
+                                        {
+                                            { 10, 20, 30 },
+                                            { 40, 50, 60 } }),
+                            new SimpleInheretingRecord(3.14159f, 1000, (short) 2, "def",
+                                    new long[][]
+                                        {
+                                            { 70, 80, 90 },
+                                            { 100, 110, 120 } }),
+                            new SimpleInheretingRecord(-1f, 10000, (short) 1, "ghi", new long[][]
+                                {
+                                    { 10, 20, 30 },
+                                    { 40, 50, 60 } }),
+                            new SimpleInheretingRecord(11.111111f, 100000, (short) 2, "jkl",
+                                    new long[][]
+                                        {
+                                            { 70, 80, 90 },
+                                            { 100, 110, 120 } }), }, new int[]
+                    { 2, 2 });
+        writer.compound().setMDArrayAttr("/", "cpdMDArray", recordMDArrWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleInheretingRecord> type =
+                reader.compound().getAttributeType("/", "cpd", SimpleInheretingRecord.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final SimpleInheretingRecord recordRead =
+                reader.compound().getAttr("/", "cpd", SimpleInheretingRecord.class);
+        assertEquals(recordWritten, recordRead);
+        final SimpleInheretingRecord[] recordArrRead =
+                reader.compound().getArrayAttr("/", "cpdArray", SimpleInheretingRecord.class);
+        assertTrue(Arrays.equals(recordArrWritten, recordArrRead));
+        final MDArray<SimpleInheretingRecord> recordMDArrRead =
+                reader.compound().getMDArrayAttr("/", "cpdMDArray", SimpleInheretingRecord.class);
+        assertEquals(recordMDArrWritten, recordMDArrRead);
+        reader.close();
+    }
+
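+    // The byte/short/byte/int/byte/long layout is stored packed on disk but padded for
+    // alignment in memory, so disk and in-memory offsets differ; the test below checks
+    // that both are reported consistently.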
+    static class RecordRequiringMemAlignment
+    {
+        byte b1;
+
+        short s;
+
+        byte b2;
+
+        int i;
+
+        byte b3;
+
+        long l;
+
+        public RecordRequiringMemAlignment()
+        {
+        }
+
+        RecordRequiringMemAlignment(byte b1, short s, byte b2, int i, byte b3, long l)
+        {
+            super();
+            this.b1 = b1;
+            this.s = s;
+            this.b2 = b2;
+            this.i = i;
+            this.b3 = b3;
+            this.l = l;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + b1;
+            result = prime * result + b2;
+            result = prime * result + b3;
+            result = prime * result + i;
+            result = prime * result + (int) (l ^ (l >>> 32));
+            result = prime * result + s;
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            RecordRequiringMemAlignment other = (RecordRequiringMemAlignment) obj;
+            if (b1 != other.b1)
+            {
+                return false;
+            }
+            if (b2 != other.b2)
+            {
+                return false;
+            }
+            if (b3 != other.b3)
+            {
+                return false;
+            }
+            if (i != other.i)
+            {
+                return false;
+            }
+            if (l != other.l)
+            {
+                return false;
+            }
+            if (s != other.s)
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "RecordRequringMemAlignment [b1=" + b1 + ", s=" + s + ", b2=" + b2 + ", i=" + i
+                    + ", b3=" + b3 + ", l=" + l + "]";
+        }
+
+    }
+
+    @Test
+    public void testCompoundAttributeMemoryAlignment()
+    {
+        final File file = new File(workingDirectory, "compoundAttributeMemoryAlignment.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final RecordRequiringMemAlignment recordWritten =
+                new RecordRequiringMemAlignment((byte) 1, (short) 2, (byte) 3, 4, (byte) 5, 6L);
+        writer.int32().write("val", 0);
+        writer.compound().setAttr("val", "attr0d", recordWritten);
+        final RecordRequiringMemAlignment[] recordArrayWritten =
+                new RecordRequiringMemAlignment[]
+                    {
+                            new RecordRequiringMemAlignment((byte) 7, (short) 8, (byte) 9, 10,
+                                    (byte) 11, 12L),
+                            new RecordRequiringMemAlignment((byte) 13, (short) 14, (byte) 15, 16,
+                                    (byte) 17, 18L) };
+        writer.compound().setArrayAttr("val", "attr1d", recordArrayWritten);
+        final MDArray<RecordRequiringMemAlignment> recordMDArrayWritten =
+                new MDArray<RecordRequiringMemAlignment>(new RecordRequiringMemAlignment[]
+                    {
+                            new RecordRequiringMemAlignment((byte) 19, (short) 20, (byte) 21, 22,
+                                    (byte) 23, 24L),
+                            new RecordRequiringMemAlignment((byte) 25, (short) 26, (byte) 27, 28,
+                                    (byte) 29, 30L),
+                            new RecordRequiringMemAlignment((byte) 31, (short) 32, (byte) 33, 34,
+                                    (byte) 35, 36L),
+                            new RecordRequiringMemAlignment((byte) 37, (short) 38, (byte) 39, 40,
+                                    (byte) 41, 42L), }, new long[]
+                    { 2, 2 });
+        writer.compound().setMDArrayAttr("val", "attr2d", recordMDArrayWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<RecordRequiringMemAlignment> type =
+                reader.compound().getInferredType(RecordRequiringMemAlignment.class);
+        final HDF5CompoundMemberInformation[] infos = type.getCompoundMemberInformation();
+        for (int i = 0; i < infos.length; ++i)
+        {
+            assertEquals(infos[i].getName() + "(" + i + ")", type.getObjectByteifyer()
+                    .getByteifyers()[i].getOffsetOnDisk(), infos[i].getOffsetOnDisk());
+            assertEquals(infos[i].getName() + "(" + i + ")", type.getObjectByteifyer()
+                    .getByteifyers()[i].getOffsetInMemory(), infos[i].getOffsetInMemory());
+            assertEquals(infos[i].getName() + "(" + i + ")", type.getObjectByteifyer()
+                    .getByteifyers()[i].getSize(), infos[i].getType().getElementSize());
+        }
+        final RecordRequiringMemAlignment recordRead =
+                reader.compound().getAttr("val", "attr0d", RecordRequiringMemAlignment.class);
+        assertEquals(recordWritten, recordRead);
+        final RecordRequiringMemAlignment[] recordArrayRead =
+                reader.compound().getArrayAttr("val", "attr1d", RecordRequiringMemAlignment.class);
+        assertTrue(Arrays.equals(recordArrayWritten, recordArrayRead));
+        final MDArray<RecordRequiringMemAlignment> recordMDArrayRead =
+                reader.compound()
+                        .getMDArrayAttr("val", "attr2d", RecordRequiringMemAlignment.class);
+        assertTrue(recordMDArrayWritten.equals(recordMDArrayRead));
+        reader.close();
+    }
+
+    @Test
+    public void testCompound()
+    {
+        final File file = new File(workingDirectory, "compound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final SimpleInheretingRecord recordWritten =
+                new SimpleInheretingRecord(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 } });
+        writer.compound().write("cpd", recordWritten);
+        final SimpleInheretingRecord2 recordWritten2 =
+                new SimpleInheretingRecord2(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 },
+                        { 7, 8, 9 } });
+        writer.compound().write("cpd2", recordWritten2);
+        final SimpleInheretingRecord3 recordWritten3 =
+                new SimpleInheretingRecord3(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 },
+                        { 7, 8, 11 } });
+        writer.compound().write("cpd3", recordWritten3);
+
+        final File file2 = new File(workingDirectory, "compound2.h5");
+        file2.delete();
+        assertFalse(file2.exists());
+        file2.deleteOnExit();
+        final IHDF5Writer writer2 = HDF5Factory.open(file2);
+        final HDF5CompoundType<HDF5CompoundDataMap> clonedType =
+                writer2.compound().getClonedType(
+                        writer.compound().getDataSetType("cpd", HDF5CompoundDataMap.class));
+        writer2.compound().write("cpd", clonedType,
+                writer.compound().read("cpd", HDF5CompoundDataMap.class));
+
+        writer.close();
+        writer2.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleInheretingRecord> type =
+                reader.compound().getDataSetType("cpd", SimpleInheretingRecord.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final SimpleInheretingRecord recordRead = reader.compound().read("cpd", type);
+        assertEquals(recordWritten, recordRead);
+        final SimpleInheretingRecord2 recordRead2 =
+                reader.compound().read("cpd2", SimpleInheretingRecord2.class);
+        assertEquals(recordWritten2, recordRead2);
+        final SimpleInheretingRecord3 recordRead3 =
+                reader.compound().read("cpd3", SimpleInheretingRecord3.class);
+        assertEquals(recordWritten3, recordRead3);
+        HDF5CompoundMemberInformation[] infos = type.getCompoundMemberInformation();
+        for (int i = 0; i < infos.length; ++i)
+        {
+            assertEquals("" + i, type.getObjectByteifyer().getByteifyers()[i].getOffsetOnDisk(),
+                    infos[i].getOffsetOnDisk());
+            assertEquals("" + i, type.getObjectByteifyer().getByteifyers()[i].getOffsetInMemory(),
+                    infos[i].getOffsetInMemory());
+        }
+        reader.close();
+
+        final IHDF5Reader reader2 = HDF5Factory.openForReading(file2);
+        final HDF5CompoundType<SimpleInheretingRecord> type2 =
+                reader2.compound().getDataSetType("cpd", SimpleInheretingRecord.class);
+        assertFalse(type2.isMappingIncomplete());
+        assertFalse(type2.isDiskRepresentationIncomplete());
+        assertFalse(type2.isMemoryRepresentationIncomplete());
+        assertEquals("SimpleInheretingRecord", type2.getName());
+        type2.checkMappingComplete();
+        final SimpleInheretingRecord recordReadFile2 = reader2.compound().read("cpd", type2);
+        assertEquals(recordWritten, recordReadFile2);
+        reader2.close();
+    }
+
+    static class SimpleStringRecord
+    {
+        String s1;
+
+        String s2;
+
+        SimpleStringRecord()
+        {
+        }
+
+        SimpleStringRecord(String s, String s2)
+        {
+            this.s1 = s;
+            this.s2 = s2;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + ((s1 == null) ? 0 : s1.hashCode());
+            result = prime * result + ((s2 == null) ? 0 : s2.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleStringRecord other = (SimpleStringRecord) obj;
+            if (s1 == null)
+            {
+                if (other.s1 != null)
+                {
+                    return false;
+                }
+            } else if (!s1.equals(other.s1))
+            {
+                return false;
+            }
+            if (s2 == null)
+            {
+                if (other.s2 != null)
+                {
+                    return false;
+                }
+            } else if (!s2.equals(other.s2))
+            {
+                return false;
+            }
+            return true;
+        }
+    }
+
+    @Test
+    public void testCompoundInferStringLength()
+    {
+        final File file = new File(workingDirectory, "stringsInCompound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final SimpleStringRecord recordWritten = new SimpleStringRecord("hello", "X");
+        writer.compound().write("strings", recordWritten);
+        final SimpleStringRecord[] recordArrayWritten = new SimpleStringRecord[]
+            { new SimpleStringRecord("hello", "X"), new SimpleStringRecord("Y2", "0123456789") };
+        writer.compound().writeArray("stringsArray", recordArrayWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
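+        // Inferring a type from a template record fixes each string member's length to that of the template value.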
+        final HDF5CompoundType<SimpleStringRecord> type =
+                reader.compound().getInferredType(recordWritten);
+        assertEquals(2, type.getCompoundMemberInformation().length);
+        assertEquals("s1", type.getCompoundMemberInformation()[0].getName());
+        assertEquals(HDF5DataClass.STRING, type.getCompoundMemberInformation()[0].getType()
+                .getDataClass());
+        assertEquals(recordWritten.s1.length(), type.getCompoundMemberInformation()[0].getType()
+                .getSize());
+        assertEquals("s2", type.getCompoundMemberInformation()[1].getName());
+        assertEquals(HDF5DataClass.STRING, type.getCompoundMemberInformation()[1].getType()
+                .getDataClass());
+        assertEquals(recordWritten.s2.length(), type.getCompoundMemberInformation()[1].getType()
+                .getSize());
+        final SimpleStringRecord recordRead = reader.compound().read("strings", type);
+        assertEquals(recordWritten, recordRead);
+
+        final HDF5CompoundType<SimpleStringRecord> arrayType =
+                reader.compound().getInferredType(recordArrayWritten);
+        assertEquals("s1", arrayType.getCompoundMemberInformation()[0].getName());
+        assertEquals(HDF5DataClass.STRING, arrayType.getCompoundMemberInformation()[0].getType()
+                .getDataClass());
+        assertEquals(recordArrayWritten[0].s1.length(), arrayType.getCompoundMemberInformation()[0]
+                .getType().getSize());
+        assertEquals("s2", arrayType.getCompoundMemberInformation()[1].getName());
+        assertEquals(HDF5DataClass.STRING, arrayType.getCompoundMemberInformation()[1].getType()
+                .getDataClass());
+        assertEquals(recordArrayWritten[1].s2.length(), arrayType.getCompoundMemberInformation()[1]
+                .getType().getSize());
+        final SimpleStringRecord[] recordArrayRead =
+                reader.compound().readArray("stringsArray", SimpleStringRecord.class);
+        assertEquals(2, recordArrayRead.length);
+        assertEquals(recordArrayWritten[0], recordArrayRead[0]);
+        assertEquals(recordArrayWritten[1], recordArrayRead[1]);
+
+        reader.close();
+    }
+
+    static class SimpleRecordWithStringsAndIntsAnnoted
+    {
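+        // Both string members are mapped as variable-length strings via @CompoundElement.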
+        @CompoundElement(variableLength = true)
+        String s1;
+
+        int i1;
+
+        @CompoundElement(variableLength = true)
+        String s2;
+
+        int i2;
+
+        SimpleRecordWithStringsAndIntsAnnoted()
+        {
+        }
+
+        SimpleRecordWithStringsAndIntsAnnoted(String s1, int i1, String s2, int i2)
+        {
+            this.s1 = s1;
+            this.i1 = i1;
+            this.s2 = s2;
+            this.i2 = i2;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + i1;
+            result = prime * result + i2;
+            result = prime * result + ((s1 == null) ? 0 : s1.hashCode());
+            result = prime * result + ((s2 == null) ? 0 : s2.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleRecordWithStringsAndIntsAnnoted other =
+                    (SimpleRecordWithStringsAndIntsAnnoted) obj;
+            if (i1 != other.i1)
+            {
+                return false;
+            }
+            if (i2 != other.i2)
+            {
+                return false;
+            }
+            if (s1 == null)
+            {
+                if (other.s1 != null)
+                {
+                    return false;
+                }
+            } else if (!s1.equals(other.s1))
+            {
+                return false;
+            }
+            if (s2 == null)
+            {
+                if (other.s2 != null)
+                {
+                    return false;
+                }
+            } else if (!s2.equals(other.s2))
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleRecordWithStringsAndIntsAnnotated [s1=" + s1 + ", i1=" + i1 + ", s2="
+                    + s2 + ", i2=" + i2 + "]";
+        }
+
+    }
+
+    static class SimpleRecordWithStringsAndInts
+    {
+        String s1;
+
+        int i1;
+
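+        // s2 is mapped to a fixed-length string of 10 characters.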
+        @CompoundElement(dimensions =
+            { 10 })
+        String s2;
+
+        int i2;
+
+        SimpleRecordWithStringsAndInts()
+        {
+        }
+
+        SimpleRecordWithStringsAndInts(String s1, int i1, String s2, int i2)
+        {
+            this.s1 = s1;
+            this.i1 = i1;
+            this.s2 = s2;
+            this.i2 = i2;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + i1;
+            result = prime * result + i2;
+            result = prime * result + ((s1 == null) ? 0 : s1.hashCode());
+            result = prime * result + ((s2 == null) ? 0 : s2.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleRecordWithStringsAndInts other = (SimpleRecordWithStringsAndInts) obj;
+            if (i1 != other.i1)
+            {
+                return false;
+            }
+            if (i2 != other.i2)
+            {
+                return false;
+            }
+            if (s1 == null)
+            {
+                if (other.s1 != null)
+                {
+                    return false;
+                }
+            } else if (!s1.equals(other.s1))
+            {
+                return false;
+            }
+            if (s2 == null)
+            {
+                if (other.s2 != null)
+                {
+                    return false;
+                }
+            } else if (!s2.equals(other.s2))
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleRecordWithStringsAndInts [s1=" + s1 + ", i1=" + i1 + ", s2=" + s2
+                    + ", i2=" + i2 + "]";
+        }
+
+    }
+
+    @Test
+    public void testCompoundVariableLengthString()
+    {
+        final File file = new File(workingDirectory, "variableLengthStringsInCompound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final HDF5CompoundType<SimpleRecordWithStringsAndIntsAnnoted> typeWritten =
+                writer.compound().getInferredType(SimpleRecordWithStringsAndIntsAnnoted.class);
+        final SimpleRecordWithStringsAndIntsAnnoted recordWritten =
+                new SimpleRecordWithStringsAndIntsAnnoted("hello", 17, "world", 1);
+        writer.compound().write("stringAntInt", typeWritten, recordWritten);
+        final SimpleRecordWithStringsAndIntsAnnoted[] recordArrayWritten =
+                new SimpleRecordWithStringsAndIntsAnnoted[]
+                    { new SimpleRecordWithStringsAndIntsAnnoted("hello", 3, "0123456789", 100000),
+                            new SimpleRecordWithStringsAndIntsAnnoted("Y2", -1, "What?", -100000) };
+        writer.compound().writeArray("stringsArray", typeWritten, recordArrayWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleRecordWithStringsAndIntsAnnoted> typeRead =
+                reader.compound().getDataSetType("stringAntInt",
+                        SimpleRecordWithStringsAndIntsAnnoted.class);
+        assertEquals(4, typeRead.getCompoundMemberInformation().length);
+        assertEquals("s1", typeRead.getCompoundMemberInformation()[0].getName());
+        assertEquals(HDF5DataClass.STRING, typeRead.getCompoundMemberInformation()[0].getType()
+                .getDataClass());
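+        // Variable-length strings are stored as pointers, so the member size equals the machine word size.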
+        assertTrue(typeRead.getCompoundMemberInformation()[0].getType().isVariableLengthString());
+        assertEquals(HDFNativeData.getMachineWordSize(), typeRead.getCompoundMemberInformation()[0]
+                .getType().getSize());
+        assertEquals("i1", typeRead.getCompoundMemberInformation()[1].getName());
+        assertEquals(HDF5DataClass.INTEGER, typeRead.getCompoundMemberInformation()[1].getType()
+                .getDataClass());
+        assertEquals("s2", typeRead.getCompoundMemberInformation()[2].getName());
+        assertEquals(HDF5DataClass.STRING, typeRead.getCompoundMemberInformation()[2].getType()
+                .getDataClass());
+        assertTrue(typeRead.getCompoundMemberInformation()[2].getType().isVariableLengthString());
+        assertEquals(HDFNativeData.getMachineWordSize(), typeRead.getCompoundMemberInformation()[2]
+                .getType().getSize());
+        assertEquals("i2", typeRead.getCompoundMemberInformation()[3].getName());
+        assertEquals(HDF5DataClass.INTEGER, typeRead.getCompoundMemberInformation()[3].getType()
+                .getDataClass());
+        assertFalse(typeRead.getCompoundMemberInformation()[3].getType().isVariableLengthString());
+        final SimpleRecordWithStringsAndIntsAnnoted recordRead =
+                reader.compound().read("stringAntInt", typeRead);
+        assertEquals(recordWritten, recordRead);
+
+        final HDF5CompoundType<SimpleRecordWithStringsAndIntsAnnoted> arrayTypeRead =
+                reader.compound().getDataSetType("stringsArray",
+                        SimpleRecordWithStringsAndIntsAnnoted.class);
+        assertEquals(4, arrayTypeRead.getCompoundMemberInformation().length);
+        assertEquals("s1", arrayTypeRead.getCompoundMemberInformation()[0].getName());
+        assertEquals(HDF5DataClass.STRING, arrayTypeRead.getCompoundMemberInformation()[0]
+                .getType().getDataClass());
+        assertTrue(arrayTypeRead.getCompoundMemberInformation()[0].getType()
+                .isVariableLengthString());
+        assertEquals(HDFNativeData.getMachineWordSize(),
+                arrayTypeRead.getCompoundMemberInformation()[0].getType().getSize());
+        assertEquals("i1", arrayTypeRead.getCompoundMemberInformation()[1].getName());
+        assertEquals(HDF5DataClass.INTEGER, arrayTypeRead.getCompoundMemberInformation()[1]
+                .getType().getDataClass());
+        assertEquals("s2", arrayTypeRead.getCompoundMemberInformation()[2].getName());
+        assertEquals(HDF5DataClass.STRING, arrayTypeRead.getCompoundMemberInformation()[2]
+                .getType().getDataClass());
+        assertTrue(arrayTypeRead.getCompoundMemberInformation()[2].getType()
+                .isVariableLengthString());
+        assertEquals(HDFNativeData.getMachineWordSize(),
+                arrayTypeRead.getCompoundMemberInformation()[2].getType().getSize());
+        assertEquals("i2", arrayTypeRead.getCompoundMemberInformation()[3].getName());
+        assertEquals(HDF5DataClass.INTEGER, arrayTypeRead.getCompoundMemberInformation()[3]
+                .getType().getDataClass());
+        assertFalse(arrayTypeRead.getCompoundMemberInformation()[3].getType()
+                .isVariableLengthString());
+        final SimpleRecordWithStringsAndIntsAnnoted[] recordArrayRead =
+                reader.compound().readArray("stringsArray", arrayTypeRead);
+        assertEquals(recordArrayWritten.length, recordArrayRead.length);
+        assertEquals(recordArrayWritten[0], recordArrayRead[0]);
+        assertEquals(recordArrayWritten[1], recordArrayRead[1]);
+
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundVariableLengthStringUsingHints()
+    {
+        final File file =
+                new File(workingDirectory, "variableLengthStringsInCompoundUsingHints.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
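+        // The hint makes strings variable-length, but the explicit dimensions on s2 take precedence.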
+        final HDF5CompoundType<SimpleRecordWithStringsAndInts> typeWritten =
+                writer.compound().getInferredType(SimpleRecordWithStringsAndInts.class,
+                        new HDF5CompoundMappingHints().useVariableLengthStrings());
+        final SimpleRecordWithStringsAndInts recordWritten =
+                new SimpleRecordWithStringsAndInts("hello", 17, "world", 1);
+        writer.compound().write("stringAntInt", typeWritten, recordWritten);
+        final SimpleRecordWithStringsAndInts[] recordArrayWritten =
+                new SimpleRecordWithStringsAndInts[]
+                    { new SimpleRecordWithStringsAndInts("hello", 3, "0123456789", 100000),
+                            new SimpleRecordWithStringsAndInts("Y2", -1, "What?", -100000) };
+        writer.compound().writeArray("stringsArray", typeWritten, recordArrayWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleRecordWithStringsAndInts> typeRead =
+                reader.compound().getDataSetType("stringAntInt",
+                        SimpleRecordWithStringsAndInts.class);
+        assertEquals(4, typeRead.getCompoundMemberInformation().length);
+        assertEquals("s1", typeRead.getCompoundMemberInformation()[0].getName());
+        assertEquals(HDF5DataClass.STRING, typeRead.getCompoundMemberInformation()[0].getType()
+                .getDataClass());
+        assertTrue(typeRead.getCompoundMemberInformation()[0].getType().isVariableLengthString());
+        assertEquals(HDFNativeData.getMachineWordSize(), typeRead.getCompoundMemberInformation()[0]
+                .getType().getSize());
+        assertEquals("i1", typeRead.getCompoundMemberInformation()[1].getName());
+        assertEquals(HDF5DataClass.INTEGER, typeRead.getCompoundMemberInformation()[1].getType()
+                .getDataClass());
+        assertEquals("s2", typeRead.getCompoundMemberInformation()[2].getName());
+        assertEquals(HDF5DataClass.STRING, typeRead.getCompoundMemberInformation()[2].getType()
+                .getDataClass());
+        assertFalse(typeRead.getCompoundMemberInformation()[2].getType().isVariableLengthString());
+        assertEquals(10, typeRead.getCompoundMemberInformation()[2].getType().getElementSize());
+        assertEquals(10, typeRead.getCompoundMemberInformation()[2].getType().getSize());
+        assertEquals("i2", typeRead.getCompoundMemberInformation()[3].getName());
+        assertEquals(HDF5DataClass.INTEGER, typeRead.getCompoundMemberInformation()[3].getType()
+                .getDataClass());
+        assertFalse(typeRead.getCompoundMemberInformation()[3].getType().isVariableLengthString());
+        final SimpleRecordWithStringsAndInts recordRead =
+                reader.compound().read("stringAntInt", typeRead);
+        assertEquals(recordWritten, recordRead);
+
+        final HDF5CompoundType<SimpleRecordWithStringsAndInts> arrayTypeRead =
+                reader.compound().getDataSetType("stringsArray",
+                        SimpleRecordWithStringsAndInts.class);
+        assertEquals(4, arrayTypeRead.getCompoundMemberInformation().length);
+        assertEquals("s1", arrayTypeRead.getCompoundMemberInformation()[0].getName());
+        assertEquals(HDF5DataClass.STRING, arrayTypeRead.getCompoundMemberInformation()[0]
+                .getType().getDataClass());
+        assertTrue(arrayTypeRead.getCompoundMemberInformation()[0].getType()
+                .isVariableLengthString());
+        assertEquals(HDFNativeData.getMachineWordSize(),
+                arrayTypeRead.getCompoundMemberInformation()[0].getType().getSize());
+        assertEquals("i1", arrayTypeRead.getCompoundMemberInformation()[1].getName());
+        assertEquals(HDF5DataClass.INTEGER, arrayTypeRead.getCompoundMemberInformation()[1]
+                .getType().getDataClass());
+        assertEquals("s2", arrayTypeRead.getCompoundMemberInformation()[2].getName());
+        assertEquals(HDF5DataClass.STRING, arrayTypeRead.getCompoundMemberInformation()[2]
+                .getType().getDataClass());
+        assertFalse(arrayTypeRead.getCompoundMemberInformation()[2].getType()
+                .isVariableLengthString());
+        assertEquals(10, arrayTypeRead.getCompoundMemberInformation()[2].getType().getElementSize());
+        assertEquals(10, arrayTypeRead.getCompoundMemberInformation()[2].getType().getSize());
+        assertEquals("i2", arrayTypeRead.getCompoundMemberInformation()[3].getName());
+        assertEquals(HDF5DataClass.INTEGER, arrayTypeRead.getCompoundMemberInformation()[3]
+                .getType().getDataClass());
+        assertFalse(arrayTypeRead.getCompoundMemberInformation()[3].getType()
+                .isVariableLengthString());
+        final SimpleRecordWithStringsAndInts[] recordArrayRead =
+                reader.compound().readArray("stringsArray", arrayTypeRead);
+        assertEquals(recordArrayWritten.length, recordArrayRead.length);
+        assertEquals(recordArrayWritten[0], recordArrayRead[0]);
+        assertEquals(recordArrayWritten[1], recordArrayRead[1]);
+
+        reader.close();
+    }
+
+    static class SimpleRecordWithReference
+    {
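+        // Mapped as an HDF5 object reference rather than as character data.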
+        @CompoundElement(reference = true)
+        String ref;
+
+        SimpleRecordWithReference()
+        {
+        }
+
+        SimpleRecordWithReference(String ref)
+        {
+            this.ref = ref;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + ((ref == null) ? 0 : ref.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleRecordWithReference other = (SimpleRecordWithReference) obj;
+            if (ref == null)
+            {
+                if (other.ref != null)
+                {
+                    return false;
+                }
+            } else if (!ref.equals(other.ref))
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleRecordWithReference [ref=" + ref + "]";
+        }
+    }
+
+    @Test
+    public void testCompoundReference()
+    {
+        final File file = new File(workingDirectory, "compoundReference.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        writer.int32().write("a", 17);
+        writer.float64().write("b", 0.001);
+        writer.compound().write("cpd1", new SimpleRecordWithReference("a"));
+        writer.compound().write("cpd2", new SimpleRecordWithReference("b"));
+        writer.close();
+
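+        // The references read back as opaque values that resolve to the paths of the referenced data sets.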
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundMemberInformation[] infoFromJavaObjs =
+                HDF5CompoundMemberInformation.create(SimpleRecordWithReference.class, "",
+                        HDF5CompoundMemberMapping.inferMapping(SimpleRecordWithReference.class));
+        assertEquals(1, infoFromJavaObjs.length);
+        assertEquals("ref:REFERENCE(8)", infoFromJavaObjs[0].toString());
+        final HDF5CompoundMemberInformation[] infoFromHDF5Objs =
+                reader.compound().getDataSetInfo("cpd1");
+        assertEquals(1, infoFromHDF5Objs.length);
+        assertEquals("ref:REFERENCE(8)", infoFromHDF5Objs[0].toString());
+        final SimpleRecordWithReference recordRead1 =
+                reader.compound().read("cpd1", SimpleRecordWithReference.class);
+        assertEquals("/a", reader.reference().resolvePath(recordRead1.ref));
+        assertEquals("INTEGER(4):{}", reader.object().getDataSetInformation(recordRead1.ref)
+                .toString());
+        assertEquals(17, reader.int32().read(recordRead1.ref));
+
+        final HDF5CompoundMemberInformation[] info2 = reader.compound().getDataSetInfo("cpd2");
+        assertEquals(1, info2.length);
+        assertEquals("ref:REFERENCE(8)", info2[0].toString());
+        final SimpleRecordWithReference recordRead2 =
+                reader.compound().read("cpd2", SimpleRecordWithReference.class);
+        assertEquals("/b", reader.reference().resolvePath(recordRead2.ref));
+        assertEquals("FLOAT(8):{}", reader.object().getDataSetInformation(recordRead2.ref)
+                .toString());
+        assertEquals(0.001, reader.float64().read(recordRead2.ref));
+        reader.close();
+    }
+
+    @Test
+    public void testClosedCompoundType()
+    {
+        final File file = new File(workingDirectory, "closedCompoundType.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final SimpleInheretingRecord recordWritten =
+                new SimpleInheretingRecord(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 } });
+        final HDF5CompoundType<SimpleInheretingRecord> type =
+                writer.compound().getInferredType(SimpleInheretingRecord.class);
+        writer.compound().write("cpd", recordWritten);
+        writer.close();
+
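+        // The type belongs to the now-closed writer, so any use of it with another writer must fail.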
+        final File file2 = new File(workingDirectory, "closedCompoundType2.h5");
+        file2.delete();
+        assertFalse(file2.exists());
+        file2.deleteOnExit();
+        final IHDF5Writer writer2 = HDF5Factory.open(file2);
+        try
+        {
+            writer2.compound().write("cpd", type, recordWritten);
+            fail("Failed to detect closed type.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Type SimpleInheretingRecord is closed.", ex.getMessage());
+        }
+        try
+        {
+            writer2.compound().getClonedType(type);
+            fail("Failed to detect closed type.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals("Type SimpleInheretingRecord is closed.", ex.getMessage());
+        }
+        writer2.close();
+    }
+
+    @Test
+    public void testAnonCompound()
+    {
+        final File file = new File(workingDirectory, "anonCompound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
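+        // Anonymous compound types are not committed under a name; they report "UNKNOWN" as their name.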
+        final HDF5CompoundType<SimpleInheretingRecord> anonType1 =
+                writer.compound().getInferredAnonType(SimpleInheretingRecord.class);
+        assertEquals("UNKNOWN", anonType1.getName());
+        final SimpleInheretingRecord recordWritten =
+                new SimpleInheretingRecord(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 } });
+        writer.compound().write("cpd", anonType1, recordWritten);
+        final SimpleInheretingRecord2 recordWritten2 =
+                new SimpleInheretingRecord2(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 },
+                        { 7, 8, 9 } });
+        final HDF5CompoundType<SimpleInheretingRecord2> anonType2 =
+                writer.compound().getInferredAnonType(recordWritten2);
+        assertEquals("UNKNOWN", anonType2.getName());
+        writer.compound().write("cpd2", anonType2, recordWritten2);
+        final SimpleInheretingRecord3 recordWritten3 =
+                new SimpleInheretingRecord3(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 },
+                        { 7, 8, 11 } });
+        final HDF5CompoundType<SimpleInheretingRecord3> anonType3 =
+                writer.compound().getInferredAnonType(recordWritten3);
+        assertEquals("UNKNOWN", anonType3.getName());
+        writer.compound().write("cpd3", anonType3, recordWritten3);
+
+        final File file2 = new File(workingDirectory, "anonCompound2.h5");
+        file2.delete();
+        assertFalse(file2.exists());
+        file2.deleteOnExit();
+        final IHDF5Writer writer2 = HDF5Factory.open(file2);
+        final HDF5CompoundType<HDF5CompoundDataMap> clonedType =
+                writer2.compound().getClonedType(
+                        writer.compound().getDataSetType("cpd", HDF5CompoundDataMap.class));
+        writer2.compound().write("cpd", clonedType,
+                writer.compound().read("cpd", HDF5CompoundDataMap.class));
+
+        writer.close();
+        writer2.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleInheretingRecord> type =
+                reader.compound().getDataSetType("cpd", SimpleInheretingRecord.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final SimpleInheretingRecord recordRead = reader.compound().read("cpd", type);
+        assertEquals(recordWritten, recordRead);
+        final SimpleInheretingRecord2 recordRead2 =
+                reader.compound().read("cpd2", SimpleInheretingRecord2.class);
+        assertEquals(recordWritten2, recordRead2);
+        final SimpleInheretingRecord3 recordRead3 =
+                reader.compound().read("cpd3", SimpleInheretingRecord3.class);
+        assertEquals(recordWritten3, recordRead3);
+        reader.close();
+
+        final IHDF5Reader reader2 = HDF5Factory.openForReading(file2);
+        final HDF5CompoundType<SimpleInheretingRecord> type2 =
+                reader2.compound().getDataSetType("cpd", SimpleInheretingRecord.class);
+        assertFalse(type2.isMappingIncomplete());
+        assertFalse(type2.isDiskRepresentationIncomplete());
+        assertFalse(type2.isMemoryRepresentationIncomplete());
+        assertEquals("UNKNOWN", type2.getName());
+        type2.checkMappingComplete();
+        final SimpleInheretingRecord recordReadFile2 = reader2.compound().read("cpd", type2);
+        assertEquals(recordWritten, recordReadFile2);
+        reader2.close();
+    }
+
+    static class StringEnumCompoundType
+    {
+        String fruit;
+
+        StringEnumCompoundType()
+        {
+        }
+
+        StringEnumCompoundType(String fruit)
+        {
+            this.fruit = fruit;
+        }
+    }
+
+    static class OrdinalEnumCompoundType
+    {
+        int fruit;
+
+        OrdinalEnumCompoundType()
+        {
+        }
+
+        OrdinalEnumCompoundType(int fruit)
+        {
+            this.fruit = fruit;
+        }
+    }
+
+    @Test
+    public void testCompoundJavaEnum()
+    {
+        final File file = new File(workingDirectory, "compoundJavaEnum.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final JavaEnumCompoundType recordWritten = new JavaEnumCompoundType(FruitEnum.CHERRY);
+        writer.compound().write("cpd", recordWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<JavaEnumCompoundType> type =
+                reader.compound().getDataSetType("cpd", JavaEnumCompoundType.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final JavaEnumCompoundType recordRead = reader.compound().read("cpd", type);
+        assertEquals(recordWritten, recordRead);
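+        // The enum member can alternatively be read back as its String name or as its ordinal.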
+        final StringEnumCompoundType stringRecordRead =
+                reader.readCompound("cpd", StringEnumCompoundType.class);
+        assertEquals(FruitEnum.CHERRY.name(), stringRecordRead.fruit);
+        final OrdinalEnumCompoundType ordinalRecordRead =
+                reader.readCompound("cpd", OrdinalEnumCompoundType.class);
+        assertEquals(FruitEnum.CHERRY.ordinal(), ordinalRecordRead.fruit);
+        reader.close();
+    }
+
+    @Test
+    public void testEnumFromCompoundJavaEnum()
+    {
+        final File file = new File(workingDirectory, "enumsFromCompoundJavaEnum.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final JavaMultipleEnumsCompoundType recordWritten =
+                new JavaMultipleEnumsCompoundType(FruitEnum.APPLE, ColorEnum.BLUE,
+                        StateEnum.ONGOING);
+        HDF5CompoundType<JavaMultipleEnumsCompoundType> type =
+                writer.compound().getInferredAnonType(JavaMultipleEnumsCompoundType.class);
+        writer.compound().write("cpd", type, recordWritten);
+        Map<String, HDF5EnumerationType> enumMap = type.getEnumTypeMap();
+        assertEquals("[fruit, color, state]", enumMap.keySet().toString());
+        writer.enumeration().write("fruit",
+                new HDF5EnumerationValue(enumMap.get("fruit"), "ORANGE"));
+        writer.enumeration()
+                .write("color", new HDF5EnumerationValue(enumMap.get("color"), "BLACK"));
+        writer.enumeration()
+                .write("state", new HDF5EnumerationValue(enumMap.get("state"), "READY"));
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        type = reader.compound().getDataSetType("cpd", JavaMultipleEnumsCompoundType.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final JavaMultipleEnumsCompoundType recordRead = reader.compound().read("cpd", type);
+        enumMap = type.getEnumTypeMap();
+        assertEquals(recordWritten, recordRead);
+        assertEquals(FruitEnum.APPLE, recordRead.fruit);
+        assertEquals(ColorEnum.BLUE, recordRead.color);
+        assertEquals(StateEnum.ONGOING, recordRead.state);
+        assertEquals(reader.enumeration().getDataSetType("fruit"), enumMap.get("fruit"));
+        assertEquals(reader.enumeration().getDataSetType("color"), enumMap.get("color"));
+        assertEquals(reader.enumeration().getDataSetType("state"), enumMap.get("state"));
+        assertEquals("ORANGE", reader.enumeration().read("fruit").getValue());
+        assertEquals("BLACK", reader.enumeration().read("color").getValue());
+        assertEquals("READY", reader.enumeration().read("state").getValue());
+        reader.close();
+    }
+
+    static class JavaEnumArrayCompoundType
+    {
+        FruitEnum[] fruits;
+
+        JavaEnumArrayCompoundType()
+        {
+        }
+
+        JavaEnumArrayCompoundType(FruitEnum[] fruits)
+        {
+            this.fruits = fruits;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + Arrays.hashCode(fruits);
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            JavaEnumArrayCompoundType other = (JavaEnumArrayCompoundType) obj;
+            if (!Arrays.equals(fruits, other.fruits))
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "JavaEnumArrayCompoundType [fruits=" + Arrays.toString(fruits) + "]";
+        }
+    }
+
+    static class StringEnumArrayCompoundType
+    {
+        String[] fruits;
+
+        StringEnumArrayCompoundType()
+        {
+        }
+
+        StringEnumArrayCompoundType(String[] fruits)
+        {
+            this.fruits = fruits;
+        }
+    }
+
+    static class OrdinalEnumArrayCompoundType
+    {
+        int[] fruits;
+
+        OrdinalEnumArrayCompoundType()
+        {
+        }
+
+        OrdinalEnumArrayCompoundType(int[] fruits)
+        {
+            this.fruits = fruits;
+        }
+    }
+
+    @Test
+    public void testCompoundJavaEnumArray()
+    {
+        final File file = new File(workingDirectory, "compoundJavaEnumArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final JavaEnumArrayCompoundType recordWritten =
+                new JavaEnumArrayCompoundType(new FruitEnum[]
+                    { FruitEnum.CHERRY, FruitEnum.APPLE, FruitEnum.ORANGE });
+        writer.compound().write("cpd", recordWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<JavaEnumArrayCompoundType> type =
+                reader.compound().getDataSetType("cpd", JavaEnumArrayCompoundType.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final JavaEnumArrayCompoundType recordRead = reader.compound().read("cpd", type);
+        assertEquals(recordWritten, recordRead);
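+        // Enum array members can likewise be read back as String[] names or int[] ordinals.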
+        final StringEnumArrayCompoundType stringRecordRead =
+                reader.readCompound("cpd", StringEnumArrayCompoundType.class);
+        assertTrue(Arrays.toString(stringRecordRead.fruits), Arrays.equals(new String[]
+            { "CHERRY", "APPLE", "ORANGE" }, stringRecordRead.fruits));
+        final OrdinalEnumArrayCompoundType ordinalRecordRead =
+                reader.readCompound("cpd", OrdinalEnumArrayCompoundType.class);
+        assertTrue(Arrays.toString(ordinalRecordRead.fruits), Arrays.equals(new int[]
+            { 2, 0, 1 }, ordinalRecordRead.fruits));
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundJavaEnumMap()
+    {
+        final File file = new File(workingDirectory, "compoundJavaEnumMap.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final HDF5CompoundDataMap recordWritten = new HDF5CompoundDataMap();
+        recordWritten.put("fruit", FruitEnum.ORANGE);
+        writer.compound().write("cpd", recordWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<HDF5CompoundDataMap> type =
+                reader.compound().getDataSetType("cpd", HDF5CompoundDataMap.class);
+        assertFalse(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertFalse(type.isMemoryRepresentationIncomplete());
+        type.checkMappingComplete();
+        final Map<String, Object> recordRead = reader.compound().read("cpd", type);
+        assertEquals(1, recordRead.size());
+        assertEquals("ORANGE", recordRead.get("fruit").toString());
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundIncompleteJavaPojo()
+    {
+        final File file = new File(workingDirectory, "compoundIncompleteJavaPojo.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final SimpleInheretingRecord recordWritten =
+                new SimpleInheretingRecord(3.14159f, 42, (short) 17, "xzy", new long[][]
+                    {
+                        { 1, 2, 3 },
+                        { 4, 5, 6 } });
+        writer.compound().write("cpd", recordWritten);
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleRecord> type =
+                reader.compound().getDataSetType("cpd", SimpleRecord.class);
+        assertTrue(type.isMappingIncomplete());
+        assertFalse(type.isDiskRepresentationIncomplete());
+        assertTrue(type.isMemoryRepresentationIncomplete());
+        try
+        {
+            type.checkMappingComplete();
+            fail("Uncomplete mapping not detected.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals(
+                    "Incomplete mapping for compound type 'SimpleInheretingRecord': unmapped members: {ll}",
+                    ex.getMessage());
+        }
+        final SimpleRecord recordRead = reader.compound().read("cpd", type);
+        assertEquals(recordWritten.getF(), recordRead.getF());
+        assertEquals(recordWritten.getI(), recordRead.getI());
+        assertEquals(recordWritten.getD(), recordRead.getD());
+        assertEquals(recordWritten.getS(), recordRead.getS());
+        final HDF5CompoundType<SimpleRecord> type2 =
+                reader.compound().getInferredType("cpd", SimpleRecord.class, null, false);
+        assertFalse(type2.isMappingIncomplete());
+        assertFalse(type2.isDiskRepresentationIncomplete());
+        assertFalse(type2.isMemoryRepresentationIncomplete());
+        type2.checkMappingComplete();
+        final SimpleRecord recordRead2 = reader.compound().read("cpd", type2);
+        assertEquals(recordWritten.getF(), recordRead2.getF());
+        assertEquals(recordWritten.getI(), recordRead2.getI());
+        assertEquals(recordWritten.getD(), recordRead2.getD());
+        assertEquals(recordWritten.getS(), recordRead2.getS());
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundHintVLString()
+    {
+        final File file = new File(workingDirectory, "testCompoundHintVLString.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        writer.compound().getInferredType(SimpleStringRecord.class,
+                new HDF5CompoundMappingHints().useVariableLengthStrings());
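+        // Inferring the types on the writer commits them, so the reader can look them up via getNamedType().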
+        final HDF5CompoundType<SimpleStringRecord> typeWritten =
+                writer.compound().getInferredType("SimpleStringRecordByTemplate",
+                        new SimpleStringRecord("aaa", "bb"),
+                        new HDF5CompoundMappingHints().useVariableLengthStrings());
+        final SimpleStringRecord recordWritten = new SimpleStringRecord("aaa", "\u3453");
+        writer.compound().write("cpd", typeWritten, recordWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<SimpleStringRecord> typeRead =
+                reader.compound().getNamedType(SimpleStringRecord.class);
+        final HDF5CompoundType<SimpleStringRecord> type2Read =
+                reader.compound().getNamedType("SimpleStringRecordByTemplate",
+                        SimpleStringRecord.class);
+        assertEquals(2, typeRead.getCompoundMemberInformation().length);
+        assertTrue(typeRead.getCompoundMemberInformation()[0].getType().isVariableLengthString());
+        assertTrue(typeRead.getCompoundMemberInformation()[1].getType().isVariableLengthString());
+        assertTrue(type2Read.getCompoundMemberInformation()[0].getType().isVariableLengthString());
+        assertTrue(type2Read.getCompoundMemberInformation()[1].getType().isVariableLengthString());
+        final SimpleStringRecord recordRead = reader.compound().read("cpd", type2Read);
+        assertEquals(recordWritten, recordRead);
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundMap()
+    {
+        final File file = new File(workingDirectory, "testCompoundMap.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.configure(file).useUTF8CharacterEncoding().writer();
+        final HDF5EnumerationType enumType =
+                writer.enumeration().getType("someEnumType", new String[]
+                    { "1", "Two", "THREE" });
+        final HDF5CompoundDataMap map = new HDF5CompoundDataMap();
+        final float a = 3.14159f;
+        map.put("a", a);
+        final int[] b = new int[]
+            { 17, -1 };
+        map.put("b", b);
+        final String c = "Teststring\u3453";
+        map.put("c", c);
+        final HDF5EnumerationValueArray d = new HDF5EnumerationValueArray(enumType, new String[]
+            { "Two", "1" });
+        map.put("d", d);
+        final BitSet e = new BitSet();
+        e.set(15);
+        map.put("e", e);
+        final float[][] f = new float[][]
+            {
+                { 1.0f, -1.0f },
+                { 1e6f, -1e6f } };
+        map.put("f", f);
+        final MDLongArray g = new MDLongArray(new long[]
+            { 1, 2, 3, 4, 5, 6, 7, 8 }, new int[]
+            { 2, 2, 2 });
+        map.put("g", g);
+        final HDF5TimeDuration h = new HDF5TimeDuration(17, HDF5TimeUnit.HOURS);
+        map.put("h", h);
+        final Date ii = new Date(10000);
+        map.put("i", ii);
+        writer.compound().write("cpd", map);
+        writer.close();
+
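+        // For map-based compounds the inferred type name is the colon-joined list of member names.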
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundType<HDF5CompoundDataMap> typeRead =
+                reader.compound().getDataSetType("cpd", HDF5CompoundDataMap.class);
+        assertEquals("a:b:c:d:e:f:g:h:i", typeRead.getName());
+        final HDF5CompoundDataMap mapRead = reader.compound().read("cpd", typeRead);
+        assertEquals(9, mapRead.size());
+        assertEquals(a, mapRead.get("a"));
+        assertTrue(ArrayUtils.toString(mapRead.get("b")), ArrayUtils.isEquals(b, mapRead.get("b")));
+        assertEquals(c, mapRead.get("c"));
+        final HDF5EnumerationValueArray dRead = (HDF5EnumerationValueArray) mapRead.get("d");
+        assertEquals("someEnumType", dRead.getType().getName());
+        assertEquals(d.getLength(), dRead.getLength());
+        for (int i = 0; i < d.getLength(); ++i)
+        {
+            assertEquals("enum array idx=" + i, d.getValue(i), dRead.getValue(i));
+        }
+        assertEquals(e, mapRead.get("e"));
+        assertTrue(ArrayUtils.toString(mapRead.get("f")), ArrayUtils.isEquals(f, mapRead.get("f")));
+        assertEquals(g, mapRead.get("g"));
+        assertEquals(h, mapRead.get("h"));
+        assertEquals(ii, mapRead.get("i"));
+
+        final HDF5CompoundType<HDF5CompoundDataMap> typeRead2 =
+                reader.compound().getDataSetType("cpd", HDF5CompoundDataMap.class,
+                        new HDF5CompoundMappingHints().enumReturnType(EnumReturnType.STRING));
+        final HDF5CompoundDataMap mapRead2 = reader.compound().read("cpd", typeRead2);
+        final String[] dRead2 = (String[]) mapRead2.get("d");
+        assertEquals(dRead.getLength(), dRead2.length);
+        for (int i = 0; i < dRead2.length; ++i)
+        {
+            assertEquals(dRead.getValue(i), dRead2[i]);
+        }
+
+        final HDF5CompoundType<HDF5CompoundDataMap> typeRead3 =
+                reader.compound().getDataSetType("cpd", HDF5CompoundDataMap.class,
+                        new HDF5CompoundMappingHints().enumReturnType(EnumReturnType.ORDINAL));
+        final HDF5CompoundDataMap mapRead3 = reader.compound().read("cpd", typeRead3);
+        final int[] dRead3 = (int[]) mapRead3.get("d");
+        assertEquals(dRead.getLength(), dRead3.length);
+        for (int i = 0; i < dRead3.length; ++i)
+        {
+            assertEquals(dRead.getOrdinal(i), dRead3[i]);
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundMapManualMapping()
+    {
+        final File file = new File(workingDirectory, "testCompoundMapManualMapping.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType =
+                writer.enumeration().getType("someEnumType", new String[]
+                    { "1", "Two", "THREE" });
+        final HDF5CompoundType<HDF5CompoundDataMap> type =
+                writer.compound()
+                        .getType(
+                                "MapCompoundA",
+                                HDF5CompoundDataMap.class,
+                                new HDF5CompoundMemberMapping[]
+                                    {
+                                            HDF5CompoundMemberMapping.mapping("a").memberClass(
+                                                    float.class),
+                                            mapping("b").memberClass(int[].class).length(2),
+                                            mapping("c").memberClass(char[].class).length(12),
+                                            mapping("d").enumType(enumType).length(2),
+                                            mapping("e").memberClass(BitSet.class).length(2),
+                                            mapping("f").memberClass(float[][].class).dimensions(2,
+                                                    2),
+                                            mapping("g").memberClass(MDLongArray.class).dimensions(
+                                                    new int[]
+                                                        { 2, 2, 2 }),
+                                            mapping("h")
+                                                    .memberClass(HDF5TimeDuration.class)
+                                                    .typeVariant(
+                                                            HDF5DataTypeVariant.TIME_DURATION_HOURS),
+                                            mapping("i").memberClass(Date.class) });
+        final HDF5CompoundDataMap map = new HDF5CompoundDataMap();
+        final float a = 3.14159f;
+        map.put("a", a);
+        final int[] b = new int[]
+            { 17, -1 };
+        map.put("b", b);
+        final String c = "Teststring";
+        map.put("c", c);
+        final HDF5EnumerationValueArray d = new HDF5EnumerationValueArray(enumType, new String[]
+            { "Two", "1" });
+        map.put("d", d);
+        final BitSet e = new BitSet();
+        e.set(15);
+        map.put("e", e);
+        final float[][] f = new float[][]
+            {
+                { 1.0f, -1.0f },
+                { 1e6f, -1e6f } };
+        map.put("f", f);
+        final MDLongArray g = new MDLongArray(new long[]
+            { 1, 2, 3, 4, 5, 6, 7, 8 }, new int[]
+            { 2, 2, 2 });
+        map.put("g", g);
+        final HDF5TimeDuration h = new HDF5TimeDuration(17, HDF5TimeUnit.HOURS);
+        map.put("h", h);
+        final Date ii = new Date(10000);
+        map.put("i", ii);
+        writer.compound().write("cpd", type, map);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5CompoundType<HDF5CompoundDataMap> typeRead =
+                reader.compound().getDataSetType("cpd", HDF5CompoundDataMap.class);
+        assertEquals("MapCompoundA", typeRead.getName());
+        final HDF5CompoundDataMap mapRead = reader.compound().read("cpd", typeRead);
+        assertEquals(9, mapRead.size());
+        assertEquals(a, mapRead.get("a"));
+        assertTrue(ArrayUtils.toString(mapRead.get("b")), ArrayUtils.isEquals(b, mapRead.get("b")));
+        assertEquals(c, mapRead.get("c"));
+        final HDF5EnumerationValueArray dRead = (HDF5EnumerationValueArray) mapRead.get("d");
+        assertEquals("someEnumType", dRead.getType().getName());
+        assertEquals(d.getLength(), dRead.getLength());
+        for (int i = 0; i < d.getLength(); ++i)
+        {
+            assertEquals("enum array idx=" + i, d.getValue(i), dRead.getValue(i));
+        }
+        assertEquals(e, mapRead.get("e"));
+        assertTrue(ArrayUtils.toString(mapRead.get("f")), ArrayUtils.isEquals(f, mapRead.get("f")));
+        assertEquals(g, mapRead.get("g"));
+        assertEquals(h, mapRead.get("h"));
+        assertEquals(ii, mapRead.get("i"));
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundMapManualMappingWithConversion()
+    {
+        final File file =
+                new File(workingDirectory, "testCompoundMapManualMappingWithConversion.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType =
+                writer.enumeration().getType("someEnumType", new String[]
+                    { "1", "Two", "THREE" });
+        final HDF5CompoundType<HDF5CompoundDataMap> type =
+                writer.compound()
+                        .getType(
+                                "MapCompoundA",
+                                HDF5CompoundDataMap.class,
+                                new HDF5CompoundMemberMapping[]
+                                    {
+                                            HDF5CompoundMemberMapping.mapping("a").memberClass(
+                                                    float.class),
+                                            mapping("b").memberClass(short.class),
+                                            mapping("c").memberClass(Date.class),
+                                            mapping("d").enumType(enumType).length(2),
+                                            mapping("e").memberClass(double.class),
+                                            mapping("f")
+                                                    .memberClass(HDF5TimeDuration.class)
+                                                    .typeVariant(
+                                                            HDF5DataTypeVariant.TIME_DURATION_HOURS) });
+        final HDF5CompoundDataMap map = new HDF5CompoundDataMap();
+        final double a = 3.14159;
+        map.put("a", a);
+        final int b = 17;
+        map.put("b", b);
+        final long c = System.currentTimeMillis();
+        map.put("c", c);
+        final int[] d = new int[]
+            { 1, 0 };
+        map.put("d", d);
+        final long e = 187493613;
+        map.put("e", e);
+        final short f = 12;
+        map.put("f", f);
+        writer.compound().write("cpd", type, map);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5CompoundType<HDF5CompoundDataMap> typeRead =
+                reader.compound().getDataSetType("cpd", HDF5CompoundDataMap.class);
+        assertEquals("MapCompoundA", typeRead.getName());
+        final HDF5CompoundDataMap mapRead = reader.compound().read("cpd", typeRead);
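+        // Each value was converted on write to the mapped member type: double to float, int to short, long to Date, etc.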
+        assertEquals(map.size(), mapRead.size());
+        assertEquals((float) a, mapRead.get("a"));
+        assertEquals((short) b, mapRead.get("b"));
+        assertEquals(new Date(c), mapRead.get("c"));
+        final HDF5EnumerationValueArray dRead = (HDF5EnumerationValueArray) mapRead.get("d");
+        assertEquals("someEnumType", dRead.getType().getName());
+        assertEquals(d.length, dRead.getLength());
+        for (int i = 0; i < d.length; ++i)
+        {
+            assertEquals("enum array idx=" + i, d[i], dRead.getOrdinal(i));
+        }
+        assertEquals((double) e, mapRead.get("e"));
+        assertEquals(new HDF5TimeDuration(f, HDF5TimeUnit.HOURS), mapRead.get("f"));
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundManualMapping()
+    {
+        final File file = new File(workingDirectory, "compoundManualMapping.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+        HDF5EnumerationType enumType = writer.enumeration().getType("someEnumType");
+        final Record recordWritten =
+                new Record(1, 2.0f, 100000000L, 3.0, (short) 4, true, "one",
+                        new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                            { 1, 2, 3 }, new float[]
+                            { 8.0f, -17.0f }, new long[]
+                            { -10, -11, -12 }, new double[]
+                            { 3.14159 }, new short[]
+                            { 1000, 2000 }, new byte[]
+                            { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                            {
+                                { 1, 2 },
+                                { 3, 4 } }), new char[]
+                            { 'A', 'b', 'C' });
+        writer.compound().write("/testCompound", compoundType, recordWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
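+        // The member information derived from the Java mapping must match what was stored in the file.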
+        final HDF5CompoundMemberInformation[] memMemberInfo =
+                Record.getMemberInfo(reader.enumeration().getType("someEnumType"));
+        final HDF5CompoundMemberInformation[] diskMemberInfo =
+                reader.compound().getDataSetInfo("/testCompound", DataTypeInfoOptions.ALL);
+        assertEquals(memMemberInfo.length, diskMemberInfo.length);
+        Arrays.sort(memMemberInfo);
+        Arrays.sort(diskMemberInfo);
+        for (int i = 0; i < memMemberInfo.length; ++i)
+        {
+            assertEquals(memMemberInfo[i], diskMemberInfo[i]);
+        }
+        compoundType = Record.getHDF5Type(reader);
+        final Record recordRead =
+                reader.compound().read("/testCompound", Record.getHDF5Type(reader));
+        assertEquals(recordWritten, recordRead);
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundMapArray()
+    {
+        final File file = new File(workingDirectory, "testCompoundMapArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final HDF5CompoundDataMap map1 = new HDF5CompoundDataMap();
+        final float a1 = 3.14159f;
+        map1.put("a", a1);
+        final HDF5CompoundDataMap map2 = new HDF5CompoundDataMap();
+        final float a2 = 18.32f;
+        map2.put("a", a2);
+        final HDF5CompoundDataMap map3 = new HDF5CompoundDataMap();
+        final float a3 = 1.546e5f;
+        map3.put("a", a3);
+        writer.writeCompoundArray("cpd", new HDF5CompoundDataMap[]
+            { map1, map2, map3 });
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final HDF5CompoundDataMap[] maps =
+                reader.compound().readArray("cpd", HDF5CompoundDataMap.class);
+        assertEquals(3, maps.length);
+        assertEquals(map1, maps[0]);
+        assertEquals(map2, maps[1]);
+        assertEquals(map3, maps[2]);
+        reader.close();
+    }
+
+    @Test
+    public void testCompoundMapMDArray()
+    {
+        final File file = new File(workingDirectory, "testCompoundMapMDArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        final HDF5CompoundDataMap map1 = new HDF5CompoundDataMap();
+        final float a1 = 3.14159f;
+        map1.put("a", a1);
+        final HDF5CompoundDataMap map2 = new HDF5CompoundDataMap();
+        final float a2 = 18.32f;
+        map2.put("a", a2);
+        final HDF5CompoundDataMap map3 = new HDF5CompoundDataMap();
+        final float a3 = 1.546e5f;
+        map3.put("a", a3);
+        final HDF5CompoundDataMap map4 = new HDF5CompoundDataMap();
+        final float a4 = -3.2f;
+        map4.put("a", a4);
+        writer.compound().writeMDArray("cpd",
+                new MDArray<HDF5CompoundDataMap>(new HDF5CompoundDataMap[]
+                    { map1, map2, map3, map4 }, new int[]
+                    { 2, 2 }));
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final MDArray<HDF5CompoundDataMap> maps =
+                reader.compound().readMDArray("cpd", HDF5CompoundDataMap.class);
+        assertTrue(ArrayUtils.isEquals(new int[]
+            { 2, 2 }, maps.dimensions()));
+        assertEquals(map1, maps.get(0, 0));
+        assertEquals(map2, maps.get(0, 1));
+        assertEquals(map3, maps.get(1, 0));
+        assertEquals(map4, maps.get(1, 1));
+        reader.close();
+    }
+
+    static class DateRecord
+    {
+        Date d;
+
+        DateRecord()
+        {
+        }
+
+        DateRecord(Date d)
+        {
+            this.d = d;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + ((d == null) ? 0 : d.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+                return true;
+            if (obj == null)
+                return false;
+            if (getClass() != obj.getClass())
+                return false;
+            DateRecord other = (DateRecord) obj;
+            if (d == null)
+            {
+                if (other.d != null)
+                    return false;
+            } else if (!d.equals(other.d))
+                return false;
+            return true;
+        }
+
+    }
+
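+    // Date members are stored as timestamps: the assertions below check
+    // that both the mapped record type and the map-based data set type
+    // carry the TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH variant.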
+    @Test
+    public void testDateCompound()
+    {
+        final File file = new File(workingDirectory, "compoundWithDate.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<DateRecord> compoundType =
+                writer.compound().getType(DateRecord.class, new HDF5CompoundMemberMapping[]
+                    { mapping("d") });
+        final DateRecord recordWritten = new DateRecord(new Date());
+        final String objectPath = "/testDateCompound";
+        writer.compound().write(objectPath, compoundType, recordWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5CompoundMemberInformation[] memMemberInfo =
+                HDF5CompoundMemberInformation.create(DateRecord.class, "", mapping("d"));
+        final HDF5CompoundMemberInformation[] diskMemberInfo =
+                HDF5CompoundMemberInformation.create(DateRecord.class, "",
+                        new HDF5CompoundMemberMapping[]
+                            { mapping("d") });
+        assertEquals(memMemberInfo.length, diskMemberInfo.length);
+        for (int i = 0; i < memMemberInfo.length; ++i)
+        {
+            assertEquals(memMemberInfo[i], diskMemberInfo[i]);
+        }
+        compoundType = reader.compound().getType(DateRecord.class, new HDF5CompoundMemberMapping[]
+            { mapping("d") });
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                compoundType.getObjectByteifyer().getByteifyers()[0].getTypeVariant());
+        final DateRecord recordRead =
+                reader.compound().read(objectPath,
+                        reader.compound().getType(DateRecord.class, mapping("d")));
+        assertEquals(recordWritten, recordRead);
+        HDF5CompoundType<HDF5CompoundDataMap> mapCompoundType =
+                reader.compound().getDataSetType(objectPath, HDF5CompoundDataMap.class);
+        assertEquals(HDF5DataTypeVariant.TIMESTAMP_MILLISECONDS_SINCE_START_OF_THE_EPOCH,
+                mapCompoundType.getObjectByteifyer().getByteifyers()[0].getTypeVariant());
+        final HDF5CompoundDataMap mapRead = reader.compound().read(objectPath, mapCompoundType);
+        assertEquals(recordWritten.d, mapRead.get("d"));
+        reader.close();
+    }
+
+    static class MatrixRecord
+    {
+        byte[][] b;
+
+        short[][] s;
+
+        int[][] i;
+
+        long[][] l;
+
+        float[][] f;
+
+        double[][] d;
+
+        MatrixRecord()
+        {
+        }
+
+        MatrixRecord(byte[][] b, short[][] s, int[][] i, long[][] l, float[][] f, double[][] d)
+        {
+            this.b = b;
+            this.s = s;
+            this.i = i;
+            this.l = l;
+            this.f = f;
+            this.d = d;
+        }
+
+        static HDF5CompoundMemberMapping[] getMapping()
+        {
+            return new HDF5CompoundMemberMapping[]
+                { mapping("b").dimensions(1, 2), mapping("s").dimensions(2, 1),
+                        mapping("i").dimensions(2, 2), mapping("l").dimensions(3, 2),
+                        mapping("f").dimensions(2, 2), mapping("d").dimensions(2, 3) };
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+                return true;
+            if (obj == null)
+                return false;
+            if (getClass() != obj.getClass())
+                return false;
+            MatrixRecord other = (MatrixRecord) obj;
+            if (!HDF5RoundtripTest.equals(b, other.b))
+                return false;
+            if (!HDF5RoundtripTest.equals(d, other.d))
+                return false;
+            if (!HDF5RoundtripTest.equals(f, other.f))
+                return false;
+            if (!HDF5RoundtripTest.equals(i, other.i))
+                return false;
+            if (!HDF5RoundtripTest.equals(l, other.l))
+                return false;
+            if (!HDF5RoundtripTest.equals(s, other.s))
+                return false;
+            return true;
+        }
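+
+        // hashCode() added to complete the equals()/hashCode() contract;
+        // Arrays.deepHashCode mirrors the element-wise comparison above.
+        @Override
+        public int hashCode()
+        {
+            return Arrays.deepHashCode(new Object[]
+                { b, s, i, l, f, d });
+        }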
+
+    }
+
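+    // mapping(name).dimensions(rows, cols) fixes the extent of a matrix
+    // member in the compound type; the two tests after this one verify
+    // that data of the wrong overall size, or with ragged rows, is
+    // rejected with an IllegalArgumentException.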
+    @Test
+    public void testMatrixCompound()
+    {
+        final File file = new File(workingDirectory, "compoundWithMatrix.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<MatrixRecord> compoundType =
+                writer.compound().getType(MatrixRecord.class, MatrixRecord.getMapping());
+        final MatrixRecord recordWritten = new MatrixRecord(new byte[][]
+            {
+                { 1, 2 } }, new short[][]
+            {
+                { 1 },
+                { 2 } }, new int[][]
+            {
+                { 1, 2 },
+                { 3, 4 } }, new long[][]
+            {
+                { 1, 2 },
+                { 3, 4 },
+                { 5, 6 } }, new float[][]
+            {
+                { 1, 2 },
+                { 3, 4 } }, new double[][]
+            {
+                { 1, 2, 3 },
+                { 4, 5, 6 } });
+        String name = "/testMatrixCompound";
+        writer.compound().write(name, compoundType, recordWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5CompoundMemberInformation[] memMemberInfo =
+                HDF5CompoundMemberInformation.create(MatrixRecord.class, "",
+                        MatrixRecord.getMapping());
+        final HDF5CompoundMemberInformation[] diskMemberInfo =
+                HDF5CompoundMemberInformation.create(MatrixRecord.class, "",
+                        MatrixRecord.getMapping());
+        assertEquals(memMemberInfo.length, diskMemberInfo.length);
+        for (int i = 0; i < memMemberInfo.length; ++i)
+        {
+            assertEquals(memMemberInfo[i], diskMemberInfo[i]);
+        }
+        compoundType = reader.compound().getType(MatrixRecord.class, MatrixRecord.getMapping());
+        final MatrixRecord recordRead = reader.compound().read(name, compoundType);
+        assertEquals(recordWritten, recordRead);
+        reader.close();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testMatrixCompoundSizeMismatch()
+    {
+        final File file = new File(workingDirectory, "compoundWithSizeMismatchMatrix.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<MatrixRecord> compoundType =
+                writer.compound().getType(MatrixRecord.class, MatrixRecord.getMapping());
+        final MatrixRecord recordWritten = new MatrixRecord(new byte[][]
+            {
+                { 1, 2 } }, new short[][]
+            {
+                { 1 },
+                { 2 } }, new int[][]
+            {
+                { 1, 2 },
+                { 3, 4 } }, new long[][]
+            {
+                { 1, 2 },
+                { 3, 4 },
+                { 5, 6 } }, new float[][]
+            {
+                { 1, 2 },
+                { 3, 4 } }, new double[][]
+            {
+                { 1, 2, 3, 4 },
+                { 5, 6, 7, 8 },
+                { 9, 10, 11, 12, 13 } });
+        String name = "/testMatrixCompound";
+        writer.compound().write(name, compoundType, recordWritten);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testMatrixCompoundDifferentNumberOfColumnsPerRow()
+    {
+        final File file =
+                new File(workingDirectory, "compoundWithMatrixDifferentNumberOfColumnsPerRow.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<MatrixRecord> compoundType =
+                writer.compound().getType(MatrixRecord.class, MatrixRecord.getMapping());
+        final MatrixRecord recordWritten = new MatrixRecord(new byte[][]
+            {
+                { 1, 2 } }, new short[][]
+            {
+                { 1 },
+                { 2 } }, new int[][]
+            {
+                { 1, 2 },
+                { 3, 4 } }, new long[][]
+            {
+                { 1, 2 },
+                { 3, 4 },
+                { 5, 6 } }, new float[][]
+            {
+                { 1, 2 },
+                { 3, 4 } }, new double[][]
+            {
+                { 1, 2, 3 },
+                { 4, 5 } });
+        String name = "/testMatrixCompound";
+        writer.compound().write(name, compoundType, recordWritten);
+    }
+
+    private static boolean equals(double[][] a, double[][] a2)
+    {
+        // Arrays.deepEquals applies the same row-by-row comparison as a
+        // hand-written loop, including null handling and, for doubles,
+        // Double.doubleToLongBits semantics for the element test.
+        return Arrays.deepEquals(a, a2);
+    }
+
+    private static boolean equals(byte[][] a, byte[][] a2)
+    {
+        return Arrays.deepEquals(a, a2);
+    }
+
+    private static boolean equals(short[][] a, short[][] a2)
+    {
+        return Arrays.deepEquals(a, a2);
+    }
+
+    private static boolean equals(int[][] a, int[][] a2)
+    {
+        return Arrays.deepEquals(a, a2);
+    }
+
+    private static boolean equals(long[][] a, long[][] a2)
+    {
+        return Arrays.deepEquals(a, a2);
+    }
+
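+    // Overflow detection: the byte[] member of Record is mapped with a
+    // fixed length of 4, so writing a 7-element array must fail; the test
+    // accepts any HDF5JavaException whose message names the 4-byte limit.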
+    @Test
+    public void testCompoundOverflow()
+    {
+        final File file = new File(workingDirectory, "compoundOverflow.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+        HDF5EnumerationType enumType = writer.enumeration().getType("someEnumType");
+        final Record recordWritten =
+                new Record(1, 2.0f, 100000000L, 3.0, (short) 4, true, "one",
+                        new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                            { 1, 2, 3 }, new float[]
+                            { 8.0f, -17.0f }, new long[]
+                            { -10, -11, -12 }, new double[]
+                            { 3.14159 }, new short[]
+                            { 1000, 2000 }, new byte[]
+                            { 11, 12, 13, 14, 0, 0, 0 }, new MDIntArray(new int[][]
+                            {
+                                { 5, 6 },
+                                { 7, 8 } }), new char[]
+                            { 'A', 'b', 'C' });
+        try
+        {
+            writer.compound().write("/testCompound", compoundType, recordWritten);
+            fail("Failed to detect overflow.");
+        } catch (HDF5JavaException ex)
+        {
+            if (ex.getMessage().contains("must not exceed 4 bytes") == false)
+            {
+                throw ex;
+            }
+            // Expected.
+        } finally
+        {
+            writer.close();
+        }
+    }
+
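+    // A BitSet member is mapped as a bit field via mapping("bs").length(100);
+    // the round trip below sets bits 39 and 100 and expects them back intact.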
+    static class BitFieldRecord
+    {
+        BitSet bs;
+
+        BitFieldRecord(BitSet bs)
+        {
+            this.bs = bs;
+        }
+
+        BitFieldRecord()
+        {
+        }
+
+        static HDF5CompoundMemberInformation[] getMemberInfo()
+        {
+            return HDF5CompoundMemberInformation.create(BitFieldRecord.class, "", mapping("bs")
+                    .length(100));
+        }
+
+        static HDF5CompoundType<BitFieldRecord> getHDF5Type(IHDF5Reader reader)
+        {
+            return reader.compound().getType(BitFieldRecord.class, mapping("bs").length(100));
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (obj instanceof BitFieldRecord == false)
+            {
+                return false;
+            }
+            final BitFieldRecord that = (BitFieldRecord) obj;
+            return this.bs.equals(that.bs);
+        }
+
+        @Override
+        public int hashCode()
+        {
+            return bs.hashCode();
+        }
+    }
+
+    @Test
+    public void testBitFieldCompound()
+    {
+        final File file = new File(workingDirectory, "compoundWithBitField.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<BitFieldRecord> compoundType = BitFieldRecord.getHDF5Type(writer);
+        final BitSet bs = new BitSet();
+        bs.set(39);
+        bs.set(100);
+        final BitFieldRecord recordWritten = new BitFieldRecord(bs);
+        writer.compound().write("/testCompound", compoundType, recordWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5CompoundMemberInformation[] memMemberInfo = BitFieldRecord.getMemberInfo();
+        final HDF5CompoundMemberInformation[] diskMemberInfo =
+                reader.compound().getDataSetInfo("/testCompound");
+        assertEquals(memMemberInfo.length, diskMemberInfo.length);
+        for (int i = 0; i < memMemberInfo.length; ++i)
+        {
+            assertEquals(memMemberInfo[i], diskMemberInfo[i]);
+        }
+        compoundType = BitFieldRecord.getHDF5Type(reader);
+        final BitFieldRecord recordRead = reader.compound().read("/testCompound", compoundType);
+        assertEquals(recordWritten, recordRead);
+        reader.close();
+    }
+
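+    // Array round trip with compact storage: the data set is written twice,
+    // once with the manually mapped type and once with the named type from
+    // getNamedType(), then read back as a full array; read() on the same
+    // path yields the first element, as the assertion shows.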
+    @Test
+    public void testCompoundArray()
+    {
+        final File file = new File(workingDirectory, "compoundArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+        HDF5EnumerationType enumType = writer.enumeration().getType("someEnumType", new String[]
+            { "1", "Two", "THREE" }, false);
+        Record[] arrayWritten =
+                new Record[]
+                    {
+                            new Record(1, 2.0f, 100000000L, 3.0, (short) -1, true, "one",
+                                    new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                                        { 1, 2, 3 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, -14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 1, 2 },
+                                            { 3, 4 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(2, 3.0f, 100000000L, 4.0, (short) 5, false, "two",
+                                    new HDF5EnumerationValue(enumType, "1"), new int[]
+                                        { 4, 5, 6 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 5, 6 },
+                                            { 7, 8 } }), new char[]
+                                        { 'A', 'b', 'C' }), };
+        writer.compound().writeArray("/testCompound", compoundType, arrayWritten,
+                HDF5GenericStorageFeatures.GENERIC_COMPACT);
+        HDF5CompoundType<Record> inferredType = writer.compound().getNamedType(Record.class);
+        // Write again, this time with inferred type.
+        writer.compound().writeArray("/testCompound", inferredType, arrayWritten,
+                HDF5GenericStorageFeatures.GENERIC_COMPACT);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5CompoundMemberInformation[] memberInfo =
+                reader.compound().getDataSetInfo("/testCompound");
+        assertEquals(16, memberInfo.length);
+        assertEquals("a", memberInfo[0].getName());
+        assertTrue(memberInfo[0].getType().isSigned());
+        assertEquals("d", memberInfo[4].getName());
+        assertFalse(memberInfo[4].getType().isSigned());
+        compoundType = Record.getHDF5Type(reader);
+        inferredType = reader.compound().getDataSetType("/testCompound", Record.class);
+        Record[] arrayRead = reader.compound().readArray("/testCompound", inferredType);
+        Record firstElementRead = reader.compound().read("/testCompound", compoundType);
+        assertEquals(arrayRead[0], firstElementRead);
+        for (int i = 0; i < arrayRead.length; ++i)
+        {
+            assertEquals("" + i, arrayWritten[i], arrayRead[i]);
+        }
+        reader.close();
+    }
+
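+    // Block-wise array I/O: createArray(path, type, 6, 3) makes room for 6
+    // records in blocks of 3, writeArrayBlock() addresses blocks by block
+    // index, and readArrayBlockWithOffset() addresses by element offset -
+    // reading 3 records at offset 1 below spans the boundary between the
+    // two written blocks.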
+    @Test
+    public void testCompoundArrayBlockWise()
+    {
+        final File file = new File(workingDirectory, "compoundVectorBlockWise.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+        HDF5EnumerationType enumType = writer.enumeration().getType("someEnumType");
+        writer.compound().createArray("/testCompound", compoundType, 6, 3);
+        Record[] arrayWritten1 =
+                new Record[]
+                    {
+                            new Record(1, 2.0f, 100000000L, 3.0, (short) 4, true, "one",
+                                    new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                                        { 1, 2, 3 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 1, 2 },
+                                            { 3, 4 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(2, 3.0f, 100000000L, 4.0, (short) 5, false, "two",
+                                    new HDF5EnumerationValue(enumType, "1"), new int[]
+                                        { 4, 5, 6 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 1, 2 },
+                                            { 3, 4 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(3, 3.0f, 100000000L, 5.0, (short) 6, true, "two",
+                                    new HDF5EnumerationValue(enumType, "Two"), new int[]
+                                        { -1, -2, -3 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 1, 2 },
+                                            { 3, 4 } }), new char[]
+                                        { 'A', 'b', 'C' }), };
+        Record[] arrayWritten2 =
+                new Record[]
+                    {
+                            new Record(4, 4.0f, 100000000L, 6.0, (short) 7, false, "two",
+                                    new HDF5EnumerationValue(enumType, "Two"), new int[]
+                                        { 100, 200, 300 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(5, 5.0f, 100000000L, 7.0, (short) 8, true, "two",
+                                    new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                                        { 400, 500, 600 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(6, 6.0f, 100000000L, 8.0, (short) 9, false, "x",
+                                    new HDF5EnumerationValue(enumType, "1"), new int[]
+                                        { -100, -200, -300 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }), };
+        writer.compound().writeArrayBlock("/testCompound", compoundType, arrayWritten1, 0);
+        writer.compound().writeArrayBlock("/testCompound", compoundType, arrayWritten2, 1);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        compoundType = Record.getHDF5Type(reader);
+        Record[] arrayRead = reader.compound().readArrayBlock("/testCompound", compoundType, 3, 0);
+        for (int i = 0; i < arrayRead.length; ++i)
+        {
+            assertEquals("" + i, arrayWritten1[i], arrayRead[i]);
+        }
+        arrayRead = reader.compound().readArrayBlock("/testCompound", compoundType, 3, 1);
+        for (int i = 0; i < arrayRead.length; ++i)
+        {
+            assertEquals("" + i, arrayWritten2[i], arrayRead[i]);
+        }
+        arrayRead = reader.compound().readArrayBlockWithOffset("/testCompound", compoundType, 3, 1);
+        for (int i = 1; i < arrayRead.length; ++i)
+        {
+            assertEquals("" + i, arrayWritten1[i], arrayRead[i - 1]);
+        }
+        assertEquals("" + (arrayRead.length - 1), arrayWritten2[0], arrayRead[arrayRead.length - 1]);
+        reader.close();
+    }
+
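+    // Multi-dimensional compound arrays: the MDArray is written with
+    // dimensions { 2, 3 } and read back by class, letting the library infer
+    // the mapping; elements are laid out row-major, as the get(i, j)
+    // assertions show.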
+    @Test
+    public void testCompoundMDArray()
+    {
+        final File file = new File(workingDirectory, "compoundMDArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(file);
+        writer.compound().writeMDArray(
+                "cpd",
+                new MDArray<SimpleRecord>(
+                        new SimpleRecord[]
+                            { createSR(1), createSR(2), createSR(3), createSR(4), createSR(5),
+                                    createSR(6) }, new int[]
+                            { 2, 3 }));
+        writer.close();
+
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        final MDArray<SimpleRecord> records =
+                reader.compound().readMDArray("cpd", SimpleRecord.class);
+        assertEquals(6, records.size());
+        assertTrue(ArrayUtils.isEquals(new int[]
+            { 2, 3 }, records.dimensions()));
+        assertEquals(createSR(1), records.get(0, 0));
+        assertEquals(createSR(2), records.get(0, 1));
+        assertEquals(createSR(3), records.get(0, 2));
+        assertEquals(createSR(4), records.get(1, 0));
+        assertEquals(createSR(5), records.get(1, 1));
+        assertEquals(createSR(6), records.get(1, 2));
+        reader.close();
+    }
+
+    private static SimpleRecord createSR(int i)
+    {
+        return new SimpleRecord(i, i, (short) i, Integer.toString(i));
+    }
+
+    @Test
+    public void testCompoundMDArrayManualMapping()
+    {
+        final File file = new File(workingDirectory, "compoundMDArrayManualMapping.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+        HDF5EnumerationType enumType = writer.enumeration().getType("someEnumType");
+        final Record[] arrayWritten =
+                new Record[]
+                    {
+                            new Record(1, 2.0f, 100000000L, 3.0, (short) 4, true, "one",
+                                    new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                                        { 1, 2, 3 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(2, 3.0f, 100000000L, 4.0, (short) 5, false, "two",
+                                    new HDF5EnumerationValue(enumType, "1"), new int[]
+                                        { 4, 5, 6 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(3, 3.0f, 100000000L, 5.0, (short) 6, true, "two",
+                                    new HDF5EnumerationValue(enumType, "Two"), new int[]
+                                        { 7, 8, 9 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(4, 4.0f, 100000000L, 6.0, (short) 7, false, "two",
+                                    new HDF5EnumerationValue(enumType, "Two"), new int[]
+                                        { 10, 11, 12 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }), };
+        final MDArray<Record> mdArrayWritten = new MDArray<Record>(arrayWritten, new int[]
+            { 2, 2 });
+        writer.compound().writeMDArray("/testCompound", compoundType, mdArrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        compoundType = Record.getHDF5Type(reader);
+        final MDArray<Record> mdArrayRead =
+                reader.compound().readMDArray("/testCompound", compoundType);
+        assertEquals(mdArrayWritten, mdArrayRead);
+        reader.close();
+    }
+
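+    // Block-wise MD I/O: createMDArray() takes the overall dimensions
+    // { 2, 2 } and the block dimensions { 2, 1 }; writeMDArrayBlock() and
+    // readMDArrayBlock() then address each block by its per-dimension
+    // block index, e.g. { 0, 1 } for the second column block.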
+    @Test
+    public void testCompoundMDArrayBlockWise()
+    {
+        final File file = new File(workingDirectory, "compoundMDArrayBlockWise.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<Record> compoundType = Record.getHDF5Type(writer);
+        HDF5EnumerationType enumType = writer.enumeration().getType("someEnumType");
+        writer.compound().createMDArray("/testCompound", compoundType, new long[]
+            { 2, 2 }, new int[]
+            { 2, 1 });
+        final Record[] arrayWritten1 =
+                new Record[]
+                    {
+                            new Record(1, 2.0f, 100000000L, 3.0, (short) 4, true, "one",
+                                    new HDF5EnumerationValue(enumType, "THREE"), new int[]
+                                        { 1, 2, 3 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(2, 3.0f, 100000000L, 4.0, (short) 5, false, "two",
+                                    new HDF5EnumerationValue(enumType, "1"), new int[]
+                                        { 2, 3, 4 }, new float[]
+                                        { 8.1f, -17.1f }, new long[]
+                                        { -10, -13, -12 }, new double[]
+                                        { 3.1415 }, new short[]
+                                        { 1000, 2001 }, new byte[]
+                                        { 11, 12, 13, 17 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }), };
+        final Record[] arrayWritten2 =
+                new Record[]
+                    {
+                            new Record(3, 3.0f, 100000000L, 5.0, (short) 6, true, "two",
+                                    new HDF5EnumerationValue(enumType, "Two"), new int[]
+                                        { 3, 4, 5 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }),
+                            new Record(4, 4.0f, 100000000L, 6.0, (short) 7, false, "two",
+                                    new HDF5EnumerationValue(enumType, "Two"), new int[]
+                                        { 4, 5, 6 }, new float[]
+                                        { 8.0f, -17.0f }, new long[]
+                                        { -10, -11, -12 }, new double[]
+                                        { 3.14159 }, new short[]
+                                        { 1000, 2000 }, new byte[]
+                                        { 11, 12, 13, 14 }, new MDIntArray(new int[][]
+                                        {
+                                            { 6, 7 },
+                                            { 8, 9 } }), new char[]
+                                        { 'A', 'b', 'C' }), };
+        final MDArray<Record> mdArrayWritten1 = new MDArray<Record>(arrayWritten1, new int[]
+            { 2, 1 });
+        final MDArray<Record> mdArrayWritten2 = new MDArray<Record>(arrayWritten2, new int[]
+            { 2, 1 });
+        writer.compound().writeMDArrayBlock("/testCompound", compoundType, mdArrayWritten1,
+                new long[]
+                    { 0, 0 });
+        writer.compound().writeMDArrayBlock("/testCompound", compoundType, mdArrayWritten2,
+                new long[]
+                    { 0, 1 });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        compoundType = Record.getHDF5Type(reader);
+        final MDArray<Record> mdArrayRead1 =
+                reader.compound().readMDArrayBlock("/testCompound", compoundType, new int[]
+                    { 2, 1 }, new long[]
+                    { 0, 0 });
+        final MDArray<Record> mdArrayRead2 =
+                reader.compound().readMDArrayBlock("/testCompound", compoundType, new int[]
+                    { 2, 1 }, new long[]
+                    { 0, 1 });
+        assertEquals(mdArrayWritten1, mdArrayRead1);
+        assertEquals(mdArrayWritten2, mdArrayRead2);
+        reader.close();
+    }
+
+    static class RecordA
+    {
+        int a;
+
+        double b;
+
+        RecordA(int a, float b)
+        {
+            this.a = a;
+            this.b = b;
+        }
+
+        RecordA()
+        {
+        }
+
+        static HDF5CompoundType<RecordA> getHDF5Type(IHDF5Reader reader)
+        {
+            return reader.compound().getType(RecordA.class, mapping("a"), mapping("b"));
+        }
+    }
+
+    static class RecordB
+    {
+        float a;
+
+        long b;
+
+        RecordB(float a, int b)
+        {
+            this.a = a;
+            this.b = b;
+        }
+
+        RecordB()
+        {
+        }
+
+        static HDF5CompoundType<RecordB> getHDF5Type(IHDF5Reader reader)
+        {
+            return reader.compound().getType(RecordB.class, mapping("a"), mapping("b"));
+        }
+    }
+
+    static class RecordC
+    {
+        float a;
+
+        RecordC(float a)
+        {
+            this.a = a;
+        }
+
+        RecordC()
+        {
+        }
+
+    }
+
+    static class RecordD
+    {
+        @CompoundElement(memberName = "a")
+        float b;
+
+        RecordD(float b)
+        {
+            this.b = b;
+        }
+
+        RecordD()
+        {
+        }
+
+    }
+
+    static class RecordE
+    {
+        int a;
+
+        RecordE(int a)
+        {
+            this.a = a;
+        }
+
+        RecordE()
+        {
+        }
+
+    }
+
+    static class MatrixElementRecord
+    {
+        int row;
+
+        int col;
+
+        MatrixElementRecord()
+        {
+        }
+
+        MatrixElementRecord(int row, int col)
+        {
+            this.row = row;
+            this.col = col;
+        }
+
+        boolean equals(@SuppressWarnings("hiding") int row, @SuppressWarnings("hiding") int col)
+        {
+            return this.row == row && this.col == col;
+        }
+
+        @Override
+        public boolean equals(Object o)
+        {
+            if (o instanceof MatrixElementRecord == false)
+            {
+                return false;
+            }
+            final MatrixElementRecord m = (MatrixElementRecord) o;
+            return equals(m.row, m.col);
+        }
+
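+        // hashCode() added to complete the equals()/hashCode() contract.
+        @Override
+        public int hashCode()
+        {
+            return 31 * row + col;
+        }
+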
+        @Override
+        public String toString()
+        {
+            return "(" + row + "," + col + ")";
+        }
+    }
+
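+    // getMDArrayBlocks() iterates over a data set in its natural blocks
+    // (the 2x2 chunks declared at creation), yielding HDF5MDDataBlock
+    // objects that carry both the per-dimension block index and the data;
+    // the loops below check every 2x2 block of the 4x4 matrix in both the
+    // typed and the class-inferred variant.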
+    @Test
+    public void testIterateOverMDCompoundArrayInNaturalBlocks()
+    {
+        final File datasetFile =
+                new File(workingDirectory, "iterateOverMDCompoundArrayInNaturalBlocks.h5");
+        datasetFile.delete();
+        assertFalse(datasetFile.exists());
+        datasetFile.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(datasetFile);
+        final String dsName = "ds";
+        final HDF5CompoundType<MatrixElementRecord> typeW =
+                writer.compound().getInferredType(MatrixElementRecord.class);
+        assertEquals(HDF5Utils.getDataTypeGroup("") + "/Compound_MatrixElementRecord",
+                typeW.tryGetDataTypePath());
+        assertEquals("<MatrixElementRecord>COMPOUND(8)",
+                typeW.getDataTypeInformation(HDF5DataTypeInformation.options().all()).toString());
+        writer.compound().createMDArray(dsName, typeW, new long[]
+            { 4, 4 }, new int[]
+            { 2, 2 });
+        writer.compound().writeMDArrayBlock(
+                dsName,
+                typeW,
+                new MDArray<MatrixElementRecord>(new MatrixElementRecord[]
+                    { new MatrixElementRecord(1, 1), new MatrixElementRecord(1, 2),
+                            new MatrixElementRecord(2, 1), new MatrixElementRecord(2, 2) },
+                        new int[]
+                            { 2, 2 }), new long[]
+                    { 0, 0 });
+        writer.compound().writeMDArrayBlock(
+                dsName,
+                typeW,
+                new MDArray<MatrixElementRecord>(new MatrixElementRecord[]
+                    { new MatrixElementRecord(3, 1), new MatrixElementRecord(3, 2),
+                            new MatrixElementRecord(4, 1), new MatrixElementRecord(4, 2) },
+                        new int[]
+                            { 2, 2 }), new long[]
+                    { 1, 0 });
+        writer.compound().writeMDArrayBlock(
+                dsName,
+                typeW,
+                new MDArray<MatrixElementRecord>(new MatrixElementRecord[]
+                    { new MatrixElementRecord(1, 3), new MatrixElementRecord(1, 4),
+                            new MatrixElementRecord(2, 3), new MatrixElementRecord(2, 4) },
+                        new int[]
+                            { 2, 2 }), new long[]
+                    { 0, 1 });
+        writer.compound().writeMDArrayBlock(
+                dsName,
+                typeW,
+                new MDArray<MatrixElementRecord>(new MatrixElementRecord[]
+                    { new MatrixElementRecord(3, 3), new MatrixElementRecord(3, 4),
+                            new MatrixElementRecord(4, 3), new MatrixElementRecord(4, 4) },
+                        new int[]
+                            { 2, 2 }), new long[]
+                    { 1, 1 });
+        writer.close();
+
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(datasetFile);
+        int i = 0;
+        int j = 0;
+        final HDF5CompoundType<MatrixElementRecord> typeR =
+                reader.compound().getInferredType(MatrixElementRecord.class);
+        for (HDF5MDDataBlock<MDArray<MatrixElementRecord>> block : reader.compound()
+                .getMDArrayBlocks(dsName, typeR))
+        {
+            final String ij = new MatrixElementRecord(i, j).toString() + ": ";
+            assertTrue(ij + Arrays.toString(block.getIndex()), Arrays.equals(new long[]
+                { i, j }, block.getIndex()));
+            assertTrue(ij + Arrays.toString(block.getData().dimensions()), Arrays.equals(new int[]
+                { 2, 2 }, block.getData().dimensions()));
+            assertTrue(ij + Arrays.toString(block.getData().getAsFlatArray()), Arrays.equals(
+                    new MatrixElementRecord[]
+                        { new MatrixElementRecord(1 + i * 2, 1 + j * 2),
+                                new MatrixElementRecord(1 + i * 2, 2 + j * 2),
+                                new MatrixElementRecord(2 + i * 2, 1 + j * 2),
+                                new MatrixElementRecord(2 + i * 2, 2 + j * 2) }, block.getData()
+                            .getAsFlatArray()));
+            if (++j > 1)
+            {
+                j = 0;
+                ++i;
+            }
+        }
+        assertEquals(2, i);
+        assertEquals(0, j);
+        i = 0;
+        j = 0;
+        for (HDF5MDDataBlock<MDArray<MatrixElementRecord>> block : reader.compound()
+                .getMDArrayBlocks(dsName, MatrixElementRecord.class))
+        {
+            final String ij = new MatrixElementRecord(i, j).toString() + ": ";
+            assertTrue(ij + Arrays.toString(block.getIndex()), Arrays.equals(new long[]
+                { i, j }, block.getIndex()));
+            assertTrue(ij + Arrays.toString(block.getData().dimensions()), Arrays.equals(new int[]
+                { 2, 2 }, block.getData().dimensions()));
+            assertTrue(ij + Arrays.toString(block.getData().getAsFlatArray()), Arrays.equals(
+                    new MatrixElementRecord[]
+                        { new MatrixElementRecord(1 + i * 2, 1 + j * 2),
+                                new MatrixElementRecord(1 + i * 2, 2 + j * 2),
+                                new MatrixElementRecord(2 + i * 2, 1 + j * 2),
+                                new MatrixElementRecord(2 + i * 2, 2 + j * 2) }, block.getData()
+                            .getAsFlatArray()));
+            if (++j > 1)
+            {
+                j = 0;
+                ++i;
+            }
+        }
+        reader.close();
+    }
+
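+    // Reading a data set with a structurally different compound type must
+    // fail even when performNumericConversions() is enabled: the reader
+    // reports that the in-memory type does not equal the type of the data
+    // set.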
+    @Test
+    public void testConfusedCompound()
+    {
+        final File file = new File(workingDirectory, "confusedCompound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<RecordA> compoundTypeInt = RecordA.getHDF5Type(writer);
+        final RecordA recordWritten = new RecordA(17, 42.0f);
+        writer.compound().write("/testCompound", compoundTypeInt, recordWritten);
+        writer.close();
+        final IHDF5Reader reader =
+                HDF5FactoryProvider.get().configureForReading(file).performNumericConversions()
+                        .reader();
+        HDF5CompoundType<RecordB> compoundTypeFloat = RecordB.getHDF5Type(reader);
+        try
+        {
+            reader.compound().read("/testCompound", compoundTypeFloat);
+            fail("Unsuitable data set type not detected.");
+        } catch (HDF5JavaException ex)
+        {
+            assertEquals(
+                    "The compound type 'UNKNOWN' does not equal the compound type of data set '/testCompound'.",
+                    ex.getMessage());
+        }
+        reader.close();
+    }
+
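+    // SimpleRecord demonstrates annotation-driven mapping: @CompoundElement
+    // can set a type variant (here TIME_DURATION_SECONDS for 'd'), fixed
+    // string dimensions (4 characters for 's'), or, as in the inheriting
+    // records below, an alternate member name and matrix dimensions.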
+    static class SimpleRecord
+    {
+        private float f;
+
+        private int i;
+
+        @CompoundElement(typeVariant = HDF5DataTypeVariant.TIME_DURATION_SECONDS)
+        private short d;
+
+        @CompoundElement(dimensions = 4)
+        private String s;
+
+        SimpleRecord()
+        {
+        }
+
+        SimpleRecord(float f, int i, short d, String s)
+        {
+            this.f = f;
+            this.i = i;
+            this.d = d;
+            this.s = s;
+        }
+
+        public float getF()
+        {
+            return f;
+        }
+
+        public int getI()
+        {
+            return i;
+        }
+
+        public short getD()
+        {
+            return d;
+        }
+
+        public String getS()
+        {
+            return s;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + d;
+            result = prime * result + Float.floatToIntBits(f);
+            result = prime * result + i;
+            result = prime * result + ((s == null) ? 0 : s.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleRecord other = (SimpleRecord) obj;
+            if (d != other.d)
+            {
+                return false;
+            }
+            if (Float.floatToIntBits(f) != Float.floatToIntBits(other.f))
+            {
+                return false;
+            }
+            if (i != other.i)
+            {
+                return false;
+            }
+            if (s == null)
+            {
+                if (other.s != null)
+                {
+                    return false;
+                }
+            } else if (!s.equals(other.s))
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleRecord [f=" + f + ", i=" + i + ", d=" + d + ", s=" + s + "]";
+        }
+
+    }
+
+    static class SimpleInheretingRecord extends SimpleRecord
+    {
+        SimpleInheretingRecord()
+        {
+        }
+
+        @CompoundElement(memberName = "ll", dimensions =
+            { 2, 3 })
+        private long[][] l;
+
+        public SimpleInheretingRecord(float f, int i, short d, String s, long[][] l)
+        {
+            super(f, i, d, s);
+            this.l = l;
+        }
+
+        public long[][] getL()
+        {
+            return l;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = super.hashCode();
+            result = prime * result + Arrays.hashCode(l);
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (!super.equals(obj))
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleInheretingRecord other = (SimpleInheretingRecord) obj;
+            if (ArrayUtils.isEquals(l, other.l) == false)
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleInheretingRecord [l=" + ArrayUtils.toString(l) + ", getF()=" + getF()
+                    + ", getI()=" + getI() + ", getD()=" + getD() + ", getS()=" + getS() + "]";
+        }
+    }
+
+    static class SimpleInheretingRecord2 extends SimpleRecord
+    {
+        SimpleInheretingRecord2()
+        {
+        }
+
+        private long[][] ll;
+
+        public SimpleInheretingRecord2(float f, int i, short d, String s, long[][] l)
+        {
+            super(f, i, d, s);
+            this.ll = l;
+        }
+
+        public long[][] getL()
+        {
+            return ll;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = super.hashCode();
+            result = prime * result + Arrays.hashCode(ll);
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (!super.equals(obj))
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleInheretingRecord2 other = (SimpleInheretingRecord2) obj;
+            if (ArrayUtils.isEquals(ll, other.ll) == false)
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleInheretingRecord2 [l=" + ArrayUtils.toString(ll) + ", getF()=" + getF()
+                    + ", getI()=" + getI() + ", getD()=" + getD() + ", getS()=" + getS() + "]";
+        }
+    }
+
+    static class SimpleInheretingRecord3 extends SimpleRecord
+    {
+        SimpleInheretingRecord3()
+        {
+        }
+
+        private MDLongArray ll;
+
+        public SimpleInheretingRecord3(float f, int i, short d, String s, long[][] l)
+        {
+            super(f, i, d, s);
+            this.ll = new MDLongArray(l);
+        }
+
+        public MDLongArray getL()
+        {
+            return ll;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = super.hashCode();
+            result = prime * result + ll.hashCode();
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (!super.equals(obj))
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            SimpleInheretingRecord3 other = (SimpleInheretingRecord3) obj;
+            if (ll.equals(other.ll) == false)
+            {
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "SimpleInheretingRecord3 [l=" + ll + ", getF()=" + getF() + ", getI()=" + getI()
+                    + ", getD()=" + getD() + ", getS()=" + getS() + "]";
+        }
+    }
+
+    enum FruitEnum
+    {
+        APPLE, ORANGE, CHERRY
+    }
+
+    enum ColorEnum
+    {
+        RED, GEEN, BLUE, BLACK
+    }
+
+    enum StateEnum
+    {
+        PREPARING, READY, ONGOING, DONE
+    }
+
+    static class JavaEnumCompoundType
+    {
+        FruitEnum fruit;
+
+        JavaEnumCompoundType()
+        {
+        }
+
+        JavaEnumCompoundType(FruitEnum fruit)
+        {
+            this.fruit = fruit;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + ((fruit == null) ? 0 : fruit.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            JavaEnumCompoundType other = (JavaEnumCompoundType) obj;
+            if (fruit != other.fruit)
+            {
+                return false;
+            }
+            return true;
+        }
+    }
+
+    static class JavaMultipleEnumsCompoundType
+    {
+        int i; // Ignored by the mapping; present only to verify that a non-enum member does no harm.
+
+        FruitEnum fruit;
+
+        ColorEnum color;
+
+        StateEnum state;
+
+        JavaMultipleEnumsCompoundType()
+        {
+        }
+
+        JavaMultipleEnumsCompoundType(FruitEnum fruit, ColorEnum color, StateEnum state)
+        {
+            this.fruit = fruit;
+            this.color = color;
+            this.state = state;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + ((color == null) ? 0 : color.hashCode());
+            result = prime * result + ((fruit == null) ? 0 : fruit.hashCode());
+            result = prime * result + ((state == null) ? 0 : state.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+            {
+                return true;
+            }
+            if (obj == null)
+            {
+                return false;
+            }
+            if (getClass() != obj.getClass())
+            {
+                return false;
+            }
+            JavaMultipleEnumsCompoundType other = (JavaMultipleEnumsCompoundType) obj;
+            if (color != other.color)
+            {
+                return false;
+            }
+            if (fruit != other.fruit)
+            {
+                return false;
+            }
+            if (state != other.state)
+            {
+                return false;
+            }
+            return true;
+        }
+    }
+
+    @Test
+    public void testInferredCompoundType()
+    {
+        final File file = new File(workingDirectory, "inferredCompoundType.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5CompoundType<SimpleRecord> typeW =
+                writer.compound().getInferredType(SimpleRecord.class);
+        writer.compound().write("sc", typeW, new SimpleRecord(2.2f, 17, (short) 10, "test"));
+        long[][] arrayWritten = new long[][]
+            {
+                { 1, 2, 3 },
+                { 4, 5, 6 } };
+        final HDF5CompoundType<SimpleInheretingRecord> itype =
+                writer.compound().getInferredType(SimpleInheretingRecord.class);
+        writer.compound().write("sci", itype,
+                new SimpleInheretingRecord(-3.1f, 42, (short) 17, "some", arrayWritten));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().configureForReading(file).reader();
+        final HDF5CompoundType<SimpleRecord> typeR =
+                reader.compound().getInferredType(SimpleRecord.class);
+        final SimpleRecord recordRead = reader.compound().read("sc", typeR);
+        final HDF5CompoundType<SimpleInheretingRecord> inheritedTypeR =
+                reader.compound().getInferredType(SimpleInheretingRecord.class);
+        final SimpleInheretingRecord recordInheritedRead =
+                reader.compound().read("sci", inheritedTypeR);
+        final HDF5CompoundMemberInformation[] info =
+                reader.compound().getMemberInfo(SimpleRecord.class);
+        assertEquals("d", info[2].getName());
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_SECONDS, info[2].getType()
+                .tryGetTypeVariant());
+        reader.close();
+
+        assertEquals(2.2f, recordRead.getF());
+        assertEquals(17, recordRead.getI());
+        assertEquals("test", recordRead.getS());
+
+        assertEquals(-3.1f, recordInheritedRead.getF());
+        assertEquals(42, recordInheritedRead.getI());
+        assertEquals("some", recordInheritedRead.getS());
+        assertTrue(equals(arrayWritten, recordInheritedRead.getL()));
+    }
+
+    static class CompleteMappedCompound
+    {
+        @CompoundElement
+        float a;
+
+        @CompoundElement
+        int b;
+
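+        // variableLength = true maps 'c' to a variable-length HDF5 string rather than a
+        // fixed-size one.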
+        @CompoundElement(variableLength = true)
+        String c;
+
+        public CompleteMappedCompound()
+        {
+        }
+
+        public CompleteMappedCompound(float a, int b, String c)
+        {
+            this.a = a;
+            this.b = b;
+            this.c = c;
+        }
+
+    }
+
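+    // With mapAllFields = false, only fields carrying @CompoundElement take part in the mapping;
+    // the field 'c' below is deliberately left out.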
+    @CompoundType(mapAllFields = false)
+    static class IncompleteMappedCompound
+    {
+        @CompoundElement
+        float a;
+
+        @CompoundElement
+        int b;
+
+        // unmapped
+        String c;
+
+        public IncompleteMappedCompound()
+        {
+        }
+
+        public IncompleteMappedCompound(float a, int b, String c)
+        {
+            this.a = a;
+            this.b = b;
+            this.c = c;
+        }
+
+    }
+
+    @Test
+    public void testInferredIncompletelyMappedCompoundType()
+    {
+        final File file = new File(workingDirectory, "inferredIncompletelyMappedCompoundType.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.compound().write("cpd", new CompleteMappedCompound(-1.111f, 11, "Not mapped"));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().configureForReading(file).reader();
+        final IncompleteMappedCompound cpd =
+                reader.compound().read("cpd", IncompleteMappedCompound.class);
+        final HDF5CompoundType<IncompleteMappedCompound> type =
+                reader.compound().getType("incomplete_mapped_compound",
+                        IncompleteMappedCompound.class, false,
+                        HDF5CompoundMemberMapping.inferMapping(IncompleteMappedCompound.class));
+        final IncompleteMappedCompound cpd2 = reader.compound().read("cpd", type);
+        reader.close();
+        assertEquals(-1.111f, cpd.a);
+        assertEquals(11, cpd.b);
+        assertEquals("Not mapped", cpd.c);
+        assertEquals(-1.111f, cpd2.a);
+        assertEquals(11, cpd2.b);
+        assertNull(cpd2.c);
+    }
+
+    @Test
+    public void testNameChangeInCompoundMapping()
+    {
+        final File file = new File(workingDirectory, "nameChangeInCompoundMapping.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final String typeName = "a_float";
+        HDF5CompoundType<RecordC> compoundTypeInt =
+                writer.compound().getInferredType(typeName, RecordC.class);
+        final RecordC recordWritten = new RecordC(33.33333f);
+        writer.compound().write("/testCompound", compoundTypeInt, recordWritten);
+        writer.close();
+        final IHDF5Reader reader =
+                HDF5FactoryProvider.get().configureForReading(file).performNumericConversions()
+                        .reader();
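+        // Read back through the committed type "a_float", mapping it onto RecordD; the value
+        // written into RecordC.a is expected to arrive in RecordD.b despite the name change.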
+        HDF5CompoundType<RecordD> compoundTypeFloat =
+                reader.compound().getNamedType(typeName, RecordD.class);
+        final RecordD recordRead = reader.compound().read("/testCompound", compoundTypeFloat);
+        assertEquals(recordWritten.a, recordRead.b);
+        reader.close();
+    }
+
+    @Test
+    public void testOverwriteCompound()
+    {
+        final File file = new File(workingDirectory, "overwriteCompound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<RecordC> compoundTypeFloat =
+                writer.compound().getInferredType(RecordC.class);
+        final RecordC recordWritten = new RecordC(33.33333f);
+        writer.compound().write("/testCompound", compoundTypeFloat, recordWritten);
+        writer.close();
+        writer = HDF5FactoryProvider.get().open(file);
+        final RecordE recordWritten2 = new RecordE(-1);
+        HDF5CompoundType<RecordE> compoundTypeInt =
+                writer.compound().getInferredType(RecordE.class);
+        writer.compound().write("/testCompound", compoundTypeInt, recordWritten2);
+        writer.close();
+
+        final IHDF5Reader reader =
+                HDF5FactoryProvider.get().configureForReading(file).performNumericConversions()
+                        .reader();
+        HDF5CompoundType<RecordE> compoundTypeInt2 = reader.compound().getNamedType(RecordE.class);
+        assertEquals(1, compoundTypeInt2.getCompoundMemberInformation().length);
+        assertEquals(HDF5DataClass.INTEGER, compoundTypeInt2.getCompoundMemberInformation()[0]
+                .getType().getDataClass());
+        assertEquals(-1, reader.compound().read("/testCompound", compoundTypeInt2).a);
+        reader.close();
+    }
+
+    @Test
+    public void testOverwriteCompoundKeepType()
+    {
+        final File file = new File(workingDirectory, "overwriteCompoundKeepType.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<RecordC> compoundTypeFloat =
+                writer.compound().getInferredType(RecordC.class);
+        final RecordC recordWritten = new RecordC(33.33333f);
+        writer.compound().write("/testCompound", compoundTypeFloat, recordWritten);
+        writer.close();
+        writer = HDF5FactoryProvider.get().configure(file).keepDataSetsIfTheyExist().writer();
+        final RecordE recordWritten2 = new RecordE(-1);
+        HDF5CompoundType<RecordE> compoundTypeInt =
+                writer.compound().getInferredType(RecordE.class);
+        writer.compound().write("/testCompound", compoundTypeInt, recordWritten2);
+        writer.close();
+
+        final IHDF5Reader reader =
+                HDF5FactoryProvider.get().configureForReading(file).performNumericConversions()
+                        .reader();
+        HDF5CompoundType<RecordE> compoundTypeInt2 =
+                reader.compound().getDataSetType("/testCompound", RecordE.class);
+        assertEquals(1, compoundTypeInt2.getCompoundMemberInformation().length);
+        assertEquals(HDF5DataClass.FLOAT, compoundTypeInt2.getCompoundMemberInformation()[0]
+                .getType().getDataClass());
+        assertEquals(-1, reader.compound().read("/testCompound", compoundTypeInt2).a);
+        reader.close();
+    }
+
+    static class SimpleRecordWithEnum
+    {
+        HDF5EnumerationValue e;
+
+        SimpleRecordWithEnum()
+        {
+        }
+
+        public SimpleRecordWithEnum(HDF5EnumerationValue e)
+        {
+            this.e = e;
+        }
+    }
+
+    @Test
+    public void testInferredCompoundTypedWithEnum()
+    {
+        final File file = new File(workingDirectory, "inferredCompoundTypeWithEnum.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
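+        // 257 alternatives: one more than fits into 8 bits, presumably chosen to exercise a
+        // wider storage form of the enumeration type.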
+        final String[] alternatives = new String[257];
+        for (int i = 0; i < alternatives.length; ++i)
+        {
+            alternatives[i] = Integer.toString(i);
+        }
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final HDF5EnumerationType enumType = writer.enumeration().getType("type", alternatives);
+        final SimpleRecordWithEnum r =
+                new SimpleRecordWithEnum(new HDF5EnumerationValue(enumType, "3"));
+        final HDF5CompoundType<SimpleRecordWithEnum> typeW = writer.compound().getInferredType(r);
+        writer.compound().write("sce", typeW, r);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().configureForReading(file).reader();
+        final HDF5CompoundType<SimpleRecordWithEnum> typeR =
+                reader.compound().getNamedType(SimpleRecordWithEnum.class);
+        final SimpleRecordWithEnum recordRead = reader.compound().read("sce", typeR);
+        assertEquals("3", recordRead.e.getValue());
+        reader.close();
+
+    }
+
+    static class SimpleRecordWithEnumArray
+    {
+        @CompoundElement(dimensions = 5)
+        HDF5EnumerationValueArray e;
+
+        SimpleRecordWithEnumArray()
+        {
+        }
+
+        public SimpleRecordWithEnumArray(HDF5EnumerationValueArray e)
+        {
+            this.e = e;
+        }
+    }
+
+    @Test
+    public void testInferredCompoundTypeWithEnumArray()
+    {
+        final File file = new File(workingDirectory, "inferredCompoundTypeWithEnumArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
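+        // 512 alternatives likewise require more than 8 bits per enumeration value.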
+        final String[] alternatives = new String[512];
+        for (int i = 0; i < alternatives.length; ++i)
+        {
+            alternatives[i] = Integer.toString(i);
+        }
+        final HDF5EnumerationType enumType = writer.enumeration().getType("type", alternatives);
+        final SimpleRecordWithEnumArray r =
+                new SimpleRecordWithEnumArray(new HDF5EnumerationValueArray(enumType, new String[]
+                    { "3", "2", "1", "511", "3" }));
+        final HDF5CompoundType<SimpleRecordWithEnumArray> cType =
+                writer.compound().getInferredType(r);
+        writer.compound().write("sce", cType, r);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().configureForReading(file).reader();
+        final HDF5CompoundType<SimpleRecordWithEnumArray> typeR =
+                reader.compound().getNamedType(SimpleRecordWithEnumArray.class);
+        final SimpleRecordWithEnumArray recordRead = reader.compound().read("sce", typeR);
+        reader.close();
+
+        assertEquals(5, recordRead.e.getLength());
+        assertEquals("3", recordRead.e.getValue(0));
+        assertEquals("2", recordRead.e.getValue(1));
+        assertEquals("1", recordRead.e.getValue(2));
+        assertEquals("511", recordRead.e.getValue(3));
+        assertEquals("3", recordRead.e.getValue(4));
+    }
+
+    static class RecordWithMatrix
+    {
+        String s;
+
+        MDFloatArray fm;
+
+        public RecordWithMatrix()
+        {
+        }
+
+        RecordWithMatrix(String s, MDFloatArray fm)
+        {
+            this.s = s;
+            this.fm = fm;
+        }
+
+        static HDF5CompoundType<RecordWithMatrix> getHDF5Type(IHDF5Reader reader)
+        {
+            return reader.compound().getType(null, RecordWithMatrix.class, getMapping());
+        }
+
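+        // Explicit mapping: 's' is stored as a fixed-length string of 5 characters, 'fm' as a
+        // 2x2 float matrix.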
+        private static HDF5CompoundMemberMapping[] getMapping()
+        {
+            return new HDF5CompoundMemberMapping[]
+                { mapping("s").length(5), mapping("fm").dimensions(2, 2) };
+        }
+
+    }
+
+    @Test
+    public void testMDArrayCompound()
+    {
+        final File file = new File(workingDirectory, "mdArrayCompound.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<RecordWithMatrix> compoundTypeMatrix =
+                RecordWithMatrix.getHDF5Type(writer);
+        final RecordWithMatrix recordWritten =
+                new RecordWithMatrix("tag", new MDFloatArray(new float[][]
+                    {
+                        { 1, 2 },
+                        { 3, 4 } }));
+        writer.compound().write("/testCompound", compoundTypeMatrix, recordWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        HDF5CompoundType<RecordWithMatrix> compoundTypeMatrixRead =
+                RecordWithMatrix.getHDF5Type(reader);
+        final RecordWithMatrix recordRead =
+                reader.compound().read("/testCompound", compoundTypeMatrixRead);
+        assertEquals(recordWritten.s, recordRead.s);
+        assertEquals(recordWritten.fm, recordRead.fm);
+    }
+
+    @Test
+    public void testMDArrayCompoundArray()
+    {
+        final File file = new File(workingDirectory, "mdArrayCompoundArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        HDF5CompoundType<RecordWithMatrix> compoundTypeMatrix =
+                RecordWithMatrix.getHDF5Type(writer);
+        final RecordWithMatrix[] recordArrayWritten = new RecordWithMatrix[]
+            { new RecordWithMatrix("tag1", new MDFloatArray(new float[][]
+                {
+                    { 1, 2 },
+                    { 3, 4 } })), new RecordWithMatrix("tag2", new MDFloatArray(new float[][]
+                {
+                    { 10, 20 },
+                    { 30, 40 } })), new RecordWithMatrix("tag3", new MDFloatArray(new float[][]
+                {
+                    { 100, 200 },
+                    { 300, 400 } })), };
+        writer.compound().writeArray("/testCompoundArray", compoundTypeMatrix, recordArrayWritten);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        HDF5CompoundType<RecordWithMatrix> compoundTypeMatrixRead =
+                RecordWithMatrix.getHDF5Type(reader);
+        final RecordWithMatrix[] recordReadArray =
+                reader.compound().readArray("/testCompoundArray", compoundTypeMatrixRead);
+        assertEquals(3, recordReadArray.length);
+        for (int i = 0; i < recordArrayWritten.length; ++i)
+        {
+            assertEquals("" + i, recordArrayWritten[i].s, recordReadArray[i].s);
+            assertEquals("" + i, recordArrayWritten[i].fm, recordReadArray[i].fm);
+        }
+    }
+
+    @Test
+    public void testSetDataSetSize()
+    {
+        final File file = new File(workingDirectory, "testSetDataSetSize.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.int8().createArray("ds", 0, 10);
+        writer.object().setDataSetSize("ds", 20);
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final HDF5DataSetInformation dsInfo = reader.getDataSetInformation("ds");
+        assertEquals(20, dsInfo.getSize());
+        assertTrue(dsInfo.isSigned());
+        int idx = 0;
+        for (byte b : reader.int8().readArray("ds"))
+        {
+            assertEquals("Position " + (idx++), 0, b);
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testNumericConversion()
+    {
+        final File file = new File(workingDirectory, "numericConversions.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.float32().write("pi", 3.14159f);
+        writer.float32().write("INFINITY", Float.POSITIVE_INFINITY);
+        writer.float64().write("DINFINITY", Double.NEGATIVE_INFINITY);
+        writer.float64().write("verySmallFloat", 1e-100);
+        writer.float64().write("veryLargeFloat", 1e+100);
+        writer.float64().setAttr("pi", "eps", 1e-5);
+        writer.int64().write("smallInteger", 17L);
+        writer.int64().write("largeInteger", Long.MAX_VALUE);
+        writer.close();
+        final IHDF5ReaderConfigurator config =
+                HDF5FactoryProvider.get().configureForReading(file).performNumericConversions();
+        // Skip the test on platforms without numeric conversion support, where it would fail.
+        if (config.platformSupportsNumericConversions() == false)
+        {
+            return;
+        }
+        final IHDF5Reader reader = config.reader();
+        assertEquals(3.14159, reader.float64().read("pi"), 1e-5);
+        assertEquals(3, reader.int32().read("pi"));
+        assertEquals(1e-5f, reader.float32().getAttr("pi", "eps"), 1e-9);
+        assertEquals(17, reader.int8().read("smallInteger"));
+        assertEquals(0.0f, reader.float32().read("verySmallFloat"));
+        assertEquals(Double.POSITIVE_INFINITY, reader.float64().read("INFINITY"));
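+        // Each of the following reads requests a narrowing conversion that cannot represent the
+        // stored value; the library is expected to signal this with H5E_CANTCONVERT.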
+        try
+        {
+            reader.int32().read("largeInteger");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        try
+        {
+            reader.float32().read("veryLargeFloat");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        try
+        {
+            reader.int64().read("veryLargeFloat");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        // In HDF5 up to 1.8.10, numeric conversions on sparc don't detect overflows
+        // for INFINITY and DINFINITY values.
+        if (OSUtilities.getCPUArchitecture().startsWith("sparc"))
+        {
+            return;
+        }
+        try
+        {
+            reader.float32().read("DINFINITY");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        try
+        {
+            reader.int64().read("INFINITY");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testNumericConversionWithNumericConversionsSwitchedOff()
+    {
+        final File file =
+                new File(workingDirectory, "numericConversionWithNumericConversionsSwitchedOff.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.float32().write("pi", 3.14159f);
+        writer.float32().write("one", 1.0f);
+        writer.float32().write("INFINITY", Float.POSITIVE_INFINITY);
+        writer.float64().write("DINFINITY", Double.NEGATIVE_INFINITY);
+        writer.float64().write("verySmallFloat", 1e-100);
+        writer.float64().write("veryLargeFloat", 1e+100);
+        writer.float64().setAttr("pi", "eps", 1e-5);
+        writer.int64().write("smallInteger", 17L);
+        writer.int64().write("largeInteger", Long.MAX_VALUE);
+        writer.close();
+        final IHDF5Reader reader = HDF5Factory.openForReading(file);
+        // <<< Don't try this at home - it is not clean: START
+        assertEquals(3.14159, reader.float64().read("pi"), 1e-5);
+        // SPARC CPUs need numeric conversion to be switched on for this to work.
+        if (OSUtilities.getCPUArchitecture().startsWith("sparc") == false)
+        {
+            assertEquals(1, reader.int32().read("one"));
+            assertEquals(Double.POSITIVE_INFINITY, reader.float64().read("INFINITY"));
+        }
+        assertEquals(1e-5f, reader.float32().getAttr("pi", "eps"), 1e-9);
+        assertEquals(17, reader.int8().read("smallInteger"));
+        assertEquals(0.0f, reader.float32().read("verySmallFloat"));
+        // Don't try this at home - it is not clean: END >>>
+        try
+        {
+            reader.int32().read("largeInteger");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        try
+        {
+            reader.float32().read("veryLargeFloat");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        try
+        {
+            reader.int64().read("veryLargeFloat");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        // In HDF5 up to 1.8.10, numeric conversions on sparc don't detect overflows
+        // for INFINITY and DINFINITY values.
+        if (OSUtilities.getCPUArchitecture().startsWith("sparc"))
+        {
+            return;
+        }
+        try
+        {
+            reader.float32().read("DINFINITY");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        try
+        {
+            reader.int64().read("INFINITY");
+            fail("Failed to detect overflow");
+        } catch (HDF5DatatypeInterfaceException ex)
+        {
+            assertEquals(HDF5Constants.H5E_CANTCONVERT, ex.getMinorErrorNumber());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testObjectReferenceOverwriteWithKeep()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceOverwriteWithKeep.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(file).keepDataSetsIfTheyExist().writer();
+        writer.string().write("a", "TestA");
+        writer.string().write("aa", "TestAA");
+        writer.reference().write("b", "aa");
+        writer.delete("a");
+        // If keepDataSetsIfTheyExist() was not given above, the dataset would be deleted, the
+        // header of the new dataset would be written at the old position of "a" and the object
+        // reference "b" would be dangling.
+        writer.string().write("aa", "TestX");
+        assertEquals("/aa", writer.reference().read("/b"));
+        writer.object().move("/aa", "/C");
+        assertEquals("/C", writer.reference().read("/b"));
+        writer.close();
+    }
+
+    @Test
+    public void testObjectReferenceOverwriteWithKeepOverridden()
+    {
+        final File file =
+                new File(workingDirectory, "testObjectReferenceOverwriteWithKeepOverridden.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(file).keepDataSetsIfTheyExist().writer();
+        writer.string().write("a", "TestA");
+        writer.string().write("aa", "TestAA");
+        writer.reference().write("b", "aa");
+        writer.delete("a");
+        // As keepDataSetsIfTheyExist() is overridden by
+        // HDF5GenericStorageFeatures.GENERIC_COMPACT_DELETE, the dataset is deleted and the header
+        // of the new dataset is written at the old position of "a", leaving the object reference
+        // "b" dangling.
+        writer.string().write("aa", "TestX", HDF5GenericStorageFeatures.GENERIC_COMPACT_DELETE);
+        // Check for dangling reference.
+        assertEquals("", writer.reference().read("/b"));
+        writer.close();
+    }
+
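+    // The next test exercises the basic object-reference round trip. A minimal sketch of the
+    // pattern, using only calls that appear in the test itself:
+    //
+    //     writer.reference().write("b", "a");   // store a reference to data set "a"
+    //     writer.object().move("/a", "/C");     // the reference follows the moved object
+    //     writer.reference().read("/b");        // resolves to "/C"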
+    @Test
+    public void testObjectReference()
+    {
+        final File file = new File(workingDirectory, "testObjectReference.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.string().write("a", "TestA");
+        writer.reference().write("b", "a");
+        assertEquals("/a", writer.reference().read("/b"));
+        writer.object().move("/a", "/C");
+        assertEquals("/C", writer.reference().read("/b"));
+        assertEquals("TestA", writer.readString(writer.reference().read("/b", false)));
+        writer.close();
+    }
+
+    @Test
+    public void testObjectReferenceArray()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.string().write("a1", "TestA");
+        writer.string().write("a2", "TestB");
+        writer.string().write("a3", "TestC");
+        writer.reference().writeArray("b", new String[]
+            { "a1", "a2", "a3" });
+        assertTrue(ArrayUtils.isEquals(new String[]
+            { "/a1", "/a2", "/a3" }, writer.reference().readArray("/b")));
+        writer.object().move("/a1", "/C");
+        assertTrue(ArrayUtils.isEquals(new String[]
+            { "/C", "/a2", "/a3" }, writer.reference().readArray("/b")));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertTrue(ArrayUtils.isEquals(new String[]
+            { "/C", "/a2", "/a3" }, reader.reference().readArray("/b")));
+        final String[] refs = reader.reference().readArray("/b", false);
+        assertEquals("TestA", reader.string().read(refs[0]));
+        assertEquals("/C", reader.reference().resolvePath(refs[0]));
+        assertEquals("TestB", reader.string().read(refs[1]));
+        assertEquals("/a2", reader.reference().resolvePath(refs[1]));
+        assertEquals("TestC", reader.string().read(refs[2]));
+        assertEquals("/a3", reader.reference().resolvePath(refs[2]));
+        reader.close();
+    }
+
+    @Test
+    public void testObjectReferenceArrayBlockWise()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceArrayBlockWise.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final String[] completeArray = new String[16];
+        for (int i = 0; i < completeArray.length; ++i)
+        {
+            writer.string().write("a" + (i + 1), "TestA" + i);
+            completeArray[i] = "/a" + (i + 1);
+        }
+        writer.reference().createArray("b", completeArray.length, completeArray.length / 4,
+                HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+        final String[][] chunk = new String[4][4];
+        System.arraycopy(completeArray, 0, chunk[0], 0, 4);
+        System.arraycopy(completeArray, 4, chunk[1], 0, 4);
+        System.arraycopy(completeArray, 8, chunk[2], 0, 4);
+        System.arraycopy(completeArray, 12, chunk[3], 0, 4);
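+        // The blocks are deliberately written out of order (0, 2, 1, 3): the block index, not
+        // the write order, determines where each block lands.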
+        writer.reference().writeArrayBlock("b", chunk[0], 0);
+        writer.reference().writeArrayBlock("b", chunk[2], 2);
+        writer.reference().writeArrayBlock("b", chunk[1], 1);
+        writer.reference().writeArrayBlock("b", chunk[3], 3);
+        assertTrue(ArrayUtils.isEquals(completeArray, writer.reference().readArray("/b")));
+        writer.object().move("/a1", "/C");
+        completeArray[0] = "/C";
+        chunk[0][0] = "/C";
+        assertTrue(ArrayUtils.isEquals(completeArray, writer.reference().readArray("/b")));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        int idx = 0;
+        for (HDF5DataBlock<String[]> block : reader.reference().getArrayNaturalBlocks("b"))
+        {
+            assertTrue("" + idx, ArrayUtils.isEquals(chunk[idx++], block.getData()));
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testObjectReferenceMDArray()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceMDArray.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.string().write("a1", "TestA");
+        writer.string().write("a2", "TestA");
+        writer.string().write("a3", "TestA");
+        writer.string().write("a4", "TestA");
+        writer.reference().writeMDArray("b", new MDArray<String>(new String[]
+            { "a1", "a2", "a3", "a4" }, new int[]
+            { 2, 2 }));
+        assertEquals(new MDArray<String>(new String[]
+            { "/a1", "/a2", "/a3", "/a4" }, new int[]
+            { 2, 2 }), writer.reference().readMDArray("/b"));
+        writer.object().move("/a1", "/C");
+        assertEquals(new MDArray<String>(new String[]
+            { "/C", "/a2", "/a3", "/a4" }, new int[]
+            { 2, 2 }), writer.reference().readMDArray("/b"));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        assertEquals(new MDArray<String>(new String[]
+            { "/C", "/a2", "/a3", "/a4" }, new int[]
+            { 2, 2 }), reader.reference().readMDArray("/b"));
+        reader.close();
+    }
+
+    @Test
+    public void testObjectReferenceMDArrayBlockWise()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceMDArrayBlockWise.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        final String[] completeArray = new String[16];
+        for (int i = 0; i < completeArray.length; ++i)
+        {
+            writer.string().write("a" + (i + 1), "TestA" + i);
+            completeArray[i] = "/a" + (i + 1);
+        }
+        writer.reference().createMDArray("b", new long[]
+            { 4, 4 }, new int[]
+            { 1, 4 }, HDF5IntStorageFeatures.INT_NO_COMPRESSION);
+        final String[][] chunk = new String[4][4];
+        System.arraycopy(completeArray, 0, chunk[0], 0, 4);
+        System.arraycopy(completeArray, 4, chunk[1], 0, 4);
+        System.arraycopy(completeArray, 8, chunk[2], 0, 4);
+        System.arraycopy(completeArray, 12, chunk[3], 0, 4);
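+        // As in the one-dimensional case, the blocks are written out of order; the offsets
+        // passed to writeMDArrayBlock() determine their placement.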
+        writer.reference().writeMDArrayBlock("b", new MDArray<String>(chunk[0], new int[]
+            { 1, 4 }), new long[]
+            { 0, 0 });
+        writer.reference().writeMDArrayBlock("b", new MDArray<String>(chunk[2], new int[]
+            { 1, 4 }), new long[]
+            { 2, 0 });
+        writer.reference().writeMDArrayBlock("b", new MDArray<String>(chunk[1], new int[]
+            { 1, 4 }), new long[]
+            { 1, 0 });
+        writer.reference().writeMDArrayBlock("b", new MDArray<String>(chunk[3], new int[]
+            { 1, 4 }), new long[]
+            { 3, 0 });
+        assertEquals(new MDArray<String>(completeArray, new int[]
+            { 4, 4 }), writer.reference().readMDArray("/b"));
+        writer.object().move("/a1", "/C");
+        completeArray[0] = "/C";
+        chunk[0][0] = "/C";
+        assertEquals(new MDArray<String>(completeArray, new int[]
+            { 4, 4 }), writer.reference().readMDArray("/b"));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        int idx = 0;
+        for (HDF5MDDataBlock<MDArray<String>> block : reader.reference().getMDArrayNaturalBlocks(
+                "b"))
+        {
+            assertEquals("" + idx, new MDArray<String>(chunk[idx++], new int[]
+                { 1, 4 }), block.getData());
+        }
+        reader.close();
+    }
+
+    @Test
+    public void testObjectReferenceAttribute()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceAttribute.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.string().write("a", "TestA");
+        writer.string().write("b", "TestB");
+        writer.reference().setAttr("a", "partner", "b");
+        assertEquals("/b", writer.reference().getAttr("/a", "partner"));
+        writer.object().move("/b", "/C");
+        assertEquals("/C", writer.reference().getAttr("/a", "partner"));
+        writer.close();
+    }
+
+    @Test
+    public void testObjectReferenceArrayAttribute()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceArrayAttribute.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.string().write("a1", "TestA1");
+        writer.string().write("a2", "TestA2");
+        writer.string().write("a3", "TestA3");
+        writer.string().write("b", "TestB");
+        writer.reference().setArrayAttr("b", "partner", new String[]
+            { "a1", "a2", "a3" });
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final String[] referencesRead = reader.reference().getArrayAttr("b", "partner");
+        assertEquals(3, referencesRead.length);
+        assertEquals("/a1", referencesRead[0]);
+        assertEquals("/a2", referencesRead[1]);
+        assertEquals("/a3", referencesRead[2]);
+        reader.close();
+    }
+
+    @Test
+    public void testObjectReferenceMDArrayAttribute()
+    {
+        final File file = new File(workingDirectory, "testObjectReferenceMDArrayAttribute.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        final IHDF5Writer writer = HDF5FactoryProvider.get().open(file);
+        writer.string().write("a1", "TestA1");
+        writer.string().write("a2", "TestA2");
+        writer.string().write("a3", "TestA3");
+        writer.string().write("a4", "TestA4");
+        writer.string().write("b", "TestB");
+        writer.reference().setMDArrayAttr("b", "partner", new MDArray<String>(new String[]
+            { "a1", "a2", "a3", "a4" }, new int[]
+            { 2, 2 }));
+        writer.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(file);
+        final MDArray<String> referencesRead = reader.reference().getMDArrayAttr("b", "partner");
+        assertTrue(ArrayUtils.isEquals(new int[]
+            { 2, 2 }, referencesRead.dimensions()));
+        assertEquals("/a1", referencesRead.get(0, 0));
+        assertEquals("/a2", referencesRead.get(0, 1));
+        assertEquals("/a3", referencesRead.get(1, 0));
+        assertEquals("/a4", referencesRead.get(1, 1));
+        reader.close();
+    }
+
+    @Test
+    public void testHDF5FileDetection() throws IOException
+    {
+        final File hdf5File = new File(workingDirectory, "testHDF5FileDetection.h5");
+        hdf5File.delete();
+        assertFalse(hdf5File.exists());
+        hdf5File.deleteOnExit();
+        final IHDF5Writer writer = HDF5Factory.open(hdf5File);
+        writer.string().write("a", "someString");
+        writer.close();
+        assertTrue(HDF5Factory.isHDF5File(hdf5File));
+
+        final File noHdf5File = new File(workingDirectory, "testHDF5FileDetection.dat");
+        noHdf5File.delete();
+        assertFalse(noHdf5File.exists());
+        noHdf5File.deleteOnExit();
+        FileUtils.writeByteArrayToFile(noHdf5File, new byte[]
+            { 1, 2, 3, 4 });
+        assertFalse(HDF5Factory.isHDF5File(noHdf5File));
+    }
+
+    @Test
+    public void testHDFJavaLowLevel()
+    {
+        final File file = new File(workingDirectory, "testHDFJavaLowLevel.h5");
+        file.delete();
+        assertFalse(file.exists());
+        file.deleteOnExit();
+        int fileId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5F.H5Fcreate(file.getAbsolutePath(),
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_ACC_TRUNC,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT);
+        int groupId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5GLO.H5Gcreate(fileId, "constants",
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT);
+        int spcId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5S
+                        .H5Screate(ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR);
+        int dsId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dcreate(groupId, "pi",
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F32LE, spcId,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT);
+        ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite(dsId,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_FLOAT,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT, new float[]
+                    { 3.14159f });
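+        // Note: passing H5S_SCALAR as the memory and file space arguments appears to work only
+        // because its numeric value coincides with H5S_ALL (both are 0 in HDF5 1.8); H5S_ALL
+        // would be the cleaner choice here.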
+        ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dclose(dsId);
+        ch.systemsx.cisd.hdf5.hdf5lib.H5S.H5Sclose(spcId);
+        ch.systemsx.cisd.hdf5.hdf5lib.H5GLO.H5Gclose(groupId);
+        ch.systemsx.cisd.hdf5.hdf5lib.H5F.H5Fclose(fileId);
+
+        fileId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5F.H5Fopen(file.getAbsolutePath(),
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5F_ACC_RDONLY,
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT);
+        spcId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5S
+                        .H5Screate(ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_SCALAR);
+        dsId =
+                ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dopen(fileId, "/constants/pi",
+                        ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT);
+        final float[] data = new float[1];
+        ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dread(dsId,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_NATIVE_FLOAT,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL,
+                ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT, data);
+        assertEquals(3.14159f, data[0], 0f);
+
+        ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dclose(dsId);
+        ch.systemsx.cisd.hdf5.hdf5lib.H5S.H5Sclose(spcId);
+        ch.systemsx.cisd.hdf5.hdf5lib.H5F.H5Fclose(fileId);
+    }
+}
\ No newline at end of file
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5SpeedTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5SpeedTest.java
new file mode 100644
index 0000000..64c9730
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5SpeedTest.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2008 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import ch.systemsx.cisd.hdf5.IHDF5WriterConfigurator.SyncMode;
+
+/**
+ * @author Bernd Rinn
+ */
+public class HDF5SpeedTest
+{
+
+    public static void main(String[] args)
+    {
+        final float[] arr = new float[1000000];
+        for (int i = 0; i < arr.length; ++i)
+        {
+            arr[i] = (float) Math.random();
+        }
+        long start = System.currentTimeMillis();
+        final File f1 = new File("speedtest.jo");
+        try
+        {
+            for (int i = 0; i < 20; ++i)
+            {
+                f1.delete();
+                final ObjectOutputStream s = new ObjectOutputStream(new FileOutputStream(f1));
+                s.writeObject(arr);
+                s.close();
+            }
+        } catch (Exception ex)
+        {
+            ex.printStackTrace();
+        }
+        final float twj = (System.currentTimeMillis() - start) / 1000.f;
+        System.out.printf("Write Java Serialization: %.2f s\n",  twj);
+        final File f2 = new File("speedtest.h5");
+        f2.delete();
+        start = System.currentTimeMillis();
+        try
+        {
+            for (int i = 0; i < 20; ++i)
+            {
+                f2.delete();
+                final IHDF5Writer writer =
+                        HDF5FactoryProvider.get().configure(f2).syncMode(SyncMode.NO_SYNC).writer();
+                writer.float32().writeArray("/f", arr);
+                writer.close();
+            }
+        } catch (HDF5LibraryException ex)
+        {
+            System.err.println(ex.getHDF5ErrorStackAsString());
+        }
+        final float twh = (System.currentTimeMillis() - start) / 1000.f;
+        System.out.printf("Write HDF5: %.2f s (%.2f %%)\n",  twh, 100.0 * twh / twj);
+        start = System.currentTimeMillis();
+        try
+        {
+            for (int i = 0; i < 20; ++i)
+            {
+                final ObjectInputStream s = new ObjectInputStream(new FileInputStream(f1));
+                s.readObject();
+                s.close();
+            }
+        } catch (Exception ex)
+        {
+            ex.printStackTrace();
+        }
+        final float trj = (System.currentTimeMillis() - start) / 1000.f;
+        System.out.printf("Read Java Serialization: %.2f s\n",  trj);
+        start = System.currentTimeMillis();
+        try
+        {
+            for (int i = 0; i < 20; ++i)
+            {
+                final IHDF5Reader reader =
+                        HDF5FactoryProvider.get().configureForReading(f2).reader();
+                reader.float32().readArray("/f");
+                reader.close();
+            }
+        } catch (HDF5LibraryException ex)
+        {
+            System.err.println(ex.getHDF5ErrorStackAsString());
+        }
+        final float trh = (System.currentTimeMillis() - start) / 1000.f;
+        System.out.printf("Read HDF5: %.2f s (%.2f %%)\n",  trh, 100.0 * trh / trj);
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationReaderTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationReaderTest.java
new file mode 100644
index 0000000..1e90b53
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5TimeDurationReaderTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2013 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.util.Arrays;
+
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.mdarray.MDLongArray;
+
+import static org.testng.AssertJUnit.*;
+
+/**
+ * Test cases for {@link HDF5TimeDurationReader}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5TimeDurationReaderTest
+{
+
+    @Test
+    public void testConvertUnit()
+    {
+        MDLongArray array = new MDLongArray(new int[]
+            { 5, 5, 5 });
+        int[] ofs = new int[]
+            { 1, 2, 1 };
+        int[] dims = new int[]
+            { 4, 3, 4 };
+        Arrays.fill(array.getAsFlatArray(), 1);
+        HDF5TimeDurationReader.convertUnit(array, HDF5TimeUnit.MINUTES, HDF5TimeUnit.SECONDS, dims,
+                ofs);
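+        // Only the block starting at 'ofs' with extent 'dims' should have been converted from
+        // 1 minute to 60 seconds; all other cells must still hold the original value 1.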
+        for (int x = 0; x < 5; ++x)
+        {
+            for (int y = 0; y < 5; ++y)
+            {
+                for (int z = 0; z < 5; ++z)
+                {
+                    final boolean converted =
+                            (x >= ofs[0] && x < ofs[0] + dims[0] && y >= ofs[1]
+                                    && y < ofs[1] + dims[1] && z >= ofs[2] && z < ofs[2] + dims[2]);
+                    assertEquals(converted ? 60 : 1, array.get(x, y, z));
+                }
+            }
+        }
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5TimeUnitTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5TimeUnitTest.java
new file mode 100644
index 0000000..3d2d4f4
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5TimeUnitTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import org.testng.annotations.Test;
+import static org.testng.AssertJUnit.*;
+
+/**
+ * Test cases for {@link HDF5TimeUnit}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5TimeUnitTest
+{
+
+    @Test
+    public void testConversion()
+    {
+        assertEquals(3, HDF5TimeUnit.HOURS.convert(10000L, HDF5TimeUnit.SECONDS));
+        assertEquals(10000L, HDF5TimeUnit.MILLISECONDS.convert(10L, HDF5TimeUnit.SECONDS));
+        assertEquals(120L, HDF5TimeUnit.MINUTES.convert(2L, HDF5TimeUnit.HOURS));
+        assertEquals(2L * 3600 * 1000 * 1000, HDF5TimeUnit.MICROSECONDS.convert(2L,
+                HDF5TimeUnit.HOURS));
+        // Overflow
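+        // The conversion saturates at Long.MIN_VALUE instead of wrapping, analogous to
+        // java.util.concurrent.TimeUnit.convert().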
+        assertEquals(Long.MIN_VALUE, HDF5TimeUnit.MICROSECONDS.convert(Long.MIN_VALUE / 24,
+                HDF5TimeUnit.DAYS));
+    }
+
+    @Test
+    public void testTypeVariant()
+    {
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_DAYS, HDF5TimeUnit.DAYS.getTypeVariant());
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_HOURS, HDF5TimeUnit.HOURS.getTypeVariant());
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_SECONDS, HDF5TimeUnit.SECONDS
+                .getTypeVariant());
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_MINUTES, HDF5TimeUnit.MINUTES
+                .getTypeVariant());
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_MILLISECONDS, HDF5TimeUnit.MILLISECONDS
+                .getTypeVariant());
+        assertEquals(HDF5DataTypeVariant.TIME_DURATION_MICROSECONDS, HDF5TimeUnit.MICROSECONDS
+                .getTypeVariant());
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5UtilsTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5UtilsTest.java
new file mode 100644
index 0000000..c7c1ed3
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5UtilsTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2012 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import org.testng.annotations.Test;
+
+import static org.testng.AssertJUnit.*;
+
+/**
+ * @author Bernd Rinn
+ */
+public class HDF5UtilsTest
+{
+
+    /** The attribute to signal that this is a variant of the data type. */
+    static final String TYPE_VARIANT_ATTRIBUTE_OLD = "__TYPE_VARIANT__";
+
+    /**
+     * Returns the type variant attribute for the given <var>attributeName</var>.
+     */
+    static String createTypeVariantAttributeNameOld(String attributeName)
+    {
+        return TYPE_VARIANT_ATTRIBUTE_OLD + attributeName + "__";
+    }
+
+    @Test
+    public void testAttributeTypeVariantAttributeName()
+    {
+        assertEquals("__TYPE_VARIANT__abc__",
+                HDF5Utils.createAttributeTypeVariantAttributeName("abc", ""));
+        assertEquals(
+                "__TYPE_VARIANT____abc____",
+                HDF5Utils.createAttributeTypeVariantAttributeName(
+                        HDF5Utils.toHouseKeepingName("abc", ""), ""));
+        assertEquals("TYPE_VARIANT__abcXX",
+                HDF5Utils.createAttributeTypeVariantAttributeName("abc", "XX"));
+        assertEquals(
+                "TYPE_VARIANT__abcXXXX",
+                HDF5Utils.createAttributeTypeVariantAttributeName(
+                        HDF5Utils.toHouseKeepingName("abc", "XX"), "XX"));
+    }
+
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5WriteTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5WriteTest.java
new file mode 100644
index 0000000..fe12638
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/HDF5WriteTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import java.io.File;
+import java.util.BitSet;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+/**
+ * @author Bernd Rinn
+ */
+public class HDF5WriteTest
+{
+
+    public static void main(String[] args)
+    {
+        final BitSet bs = new BitSet();
+        bs.set(127);
+        bs.set(64);
+        bs.set(128);
+        // bs.set(191);
+        try
+        {
+            IHDF5Writer writer =
+                    HDF5FactoryProvider.get().configure(new File("test.h5")).overwrite().writer();
+            // writer.write("/Group1/SubGroup1/MyDataSet", new float[] { 1.0f, 2.0f, 3.0f, 4.0f });
+            // writer.link("/Group1/SubGroup1/MyDataSet", "/Group1/MyDataSet");
+            // writer.write("/Group1/MyDataSet", new float[] { 4.0f, 3.0f, 2.0f, 1.0f });
+            // writer.write("/Group1/MyDataSet", new double[] { 4.0, 3.0, 2.0, 1.0 });
+            writer.writeBitField("/Group1/MyBitSet", bs);
+            writer.float32().writeMatrix("/Group1/MyDataSet", new float[][]
+                {
+                    { 4, 3, 2, 1, 0, -1 },
+                    { 0, 1, 2, 3, 4, 5 } });
+            writer.int64().writeArray("/Group1/MyDataSet2", new long[]
+                { 4, 3, 2, 1 });
+            writer.int64().writeArray("/Group1/MyDataSet3", new long[]
+                { 1 });
+            // writer.write("/Group1/MyDataSet", new int[] { 4, 3, 2, 1 });
+            writer.object().createHardLink("/Group1/MyDataSet", "/Group1/SubGroup1/MyDataSet");
+            writer.string().write("/Group1/MyString", "Und schon wieder die Geschichte vom Pferd!");
+            writer.string().setAttr("/Group1/MyDataSet", "foo", "Die Geschichte vom Pferd");
+            // writer.addAttribute("/Group1/SubGroup1/MyDataSet", "foo", "No story");
+            writer.float64().setAttr("/", "version", 17.0);
+            writer.bool().setAttr("/Group1", "active", true);
+            writer.int8().writeArray("/empty", new byte[0]);
+            writer.close();
+        } catch (HDF5LibraryException ex)
+        {
+            System.err.println(ex.getHDF5ErrorStackAsString());
+            ex.printStackTrace();
+        }
+    }
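+
+    // A read-back counterpart, sketched as a comment only (hedged: it assumes the
+    // reader-side API mirrors the writer calls used above):
+    //
+    // IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(new File("test.h5"));
+    // float[][] matrix = reader.float32().readMatrix("/Group1/MyDataSet");
+    // long[] array = reader.int64().readArray("/Group1/MyDataSet2");
+    // reader.close();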
+
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/MatrixUtilsTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/MatrixUtilsTest.java
new file mode 100644
index 0000000..b3ed30e
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/MatrixUtilsTest.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static org.testng.AssertJUnit.assertFalse;
+import static org.testng.AssertJUnit.assertTrue;
+
+import java.util.Arrays;
+
+import org.testng.annotations.Test;
+
+/**
+ * Tests for {@link MatrixUtils}. 
+ *
+ * @author Bernd Rinn
+ */
+public class MatrixUtilsTest
+{
+    @Test
+    public void testIncrementIdx()
+    {
+        int[] blockDims = new int[] { 2, 2, 2 }; 
+        int[] offset = new int[] { 1, 2, 3 }; 
+        int[] idx = offset.clone();
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 1, 2, 4 }, idx));
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 1, 3, 3 }, idx));
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 1, 3, 4 }, idx));
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 2, 2, 3 }, idx));
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 2, 2, 4 }, idx));
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 2, 3, 3 }, idx));
+        assertTrue(MatrixUtils.incrementIdx(idx, blockDims, offset));
+        assertTrue(Arrays.toString(idx), Arrays.equals(new int[] { 2, 3, 4 }, idx));
+        assertFalse(MatrixUtils.incrementIdx(idx, blockDims, offset));
+    }
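+
+    // A minimal sketch (an assumption, for illustration) of the odometer-style
+    // traversal the assertions above walk through: the last dimension advances
+    // first, and a dimension that reaches offset[d] + blockDims[d] wraps back to
+    // offset[d] and carries into the next-higher dimension; false signals that
+    // the whole block has been visited.
+    private static boolean incrementIdxSketch(int[] idx, int[] blockDims, int[] offset)
+    {
+        for (int d = idx.length - 1; d >= 0; --d)
+        {
+            if (++idx[d] < offset[d] + blockDims[d])
+            {
+                return true;
+            }
+            idx[d] = offset[d];
+        }
+        return false;
+    }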
+
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/TestLowLevelHDF5.java b/sourceTest/java/ch/systemsx/cisd/hdf5/TestLowLevelHDF5.java
new file mode 100644
index 0000000..4938ed1
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/TestLowLevelHDF5.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2008 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5F.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5S.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5T.*;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.*;
+
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+public class TestLowLevelHDF5
+{
+
+    static class Container
+    {
+        String s;
+        
+        Container()
+        {
+        }
+
+    }
+
+    public static void main(String[] args) throws Exception
+    {
+        System.out.println(HDF5Constants.H5S_MAX_RANK);
+        System.exit(0);
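+        // Everything below the exit call above is unreachable scratch code, kept
+        // as a reference for writing a one-record compound dataset through the
+        // low-level API.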
+        Container[] cont = new Container[1];
+        cont[0] = new Container();
+        cont[0].s = "aaa";
+        long[] dims = new long[]
+            { cont.length };
+        int fileId = H5Fcreate("compoundTest.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+        int dataSpaceId = H5Screate_simple(1, dims, dims);
+        
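+        // Create a compound datatype of 5 bytes with a single member "s": a
+        // fixed-length 5-byte C string stored at offset 0.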
+        int dataTypeId = H5Tcreate(H5T_COMPOUND, 5);
+        int stringDataType = H5Tcopy(H5T_C_S1);
+        H5Tset_size(stringDataType, 5);
+        H5Tinsert(dataTypeId, "s", 0, stringDataType);
+        int dataSetId =
+                H5Dcreate(fileId, "ds", dataTypeId, dataSpaceId, H5P_DEFAULT, H5P_DEFAULT,
+                        H5P_DEFAULT);
+        // The write buffer must cover the full 5-byte element size of the
+        // compound type, so pad the 3-character string with two NUL bytes.
+        H5Dwrite(dataSetId, dataTypeId, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+                (cont[0].s + "\0\0").getBytes());
+        H5Tclose(dataTypeId);
+        H5Sclose(dataSpaceId);
+        H5Dclose(dataSetId);
+        H5Fclose(fileId);
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/UnsignedIntUtilsTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/UnsignedIntUtilsTest.java
new file mode 100644
index 0000000..955fd57
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/UnsignedIntUtilsTest.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2013 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertTrue;
+
+import java.math.BigInteger;
+
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+
+/**
+ * Test cases for {@link UnsignedIntUtils}.
+ * 
+ * @author Bernd Rinn
+ */
+public class UnsignedIntUtilsTest
+{
+
+    @Test
+    public void testToInt64()
+    {
+        final BigInteger veryLarge = new BigInteger("2").pow(64).subtract(new BigInteger("100"));
+        final long int64 = UnsignedIntUtils.toInt64(veryLarge);
+        assertTrue(int64 < 0);
+        final BigInteger veryLarge2 = new BigInteger(1, NativeData.longToByte(new long[]
+            { int64 }, ByteOrder.BIG_ENDIAN));
+        assertEquals(veryLarge, veryLarge2);
+    }
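+
+    // A minimal sketch (an assumption, not UnsignedIntUtils' actual code) of the
+    // conversion exercised above: values in [0, 2^64) are reinterpreted as the
+    // long with the same 64-bit two's-complement pattern, and anything outside
+    // that range is rejected.
+    private static long toInt64Sketch(BigInteger value)
+    {
+        if (value.signum() < 0 || value.bitLength() > 64)
+        {
+            throw new IllegalArgumentException("Value out of range: " + value);
+        }
+        // longValue() keeps the low 64 bits, i.e. the two's-complement pattern.
+        return value.longValue();
+    }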
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testToInt64_Overflow()
+    {
+        final BigInteger tooLarge = new BigInteger("2").pow(64);
+        UnsignedIntUtils.toInt64(tooLarge);
+    }
+
+    @Test
+    public void testToUint32()
+    {
+        final long veryLarge = (1L << 32L) - 17;
+        final int veryLargeInt = UnsignedIntUtils.toInt32(veryLarge);
+        assertTrue(veryLargeInt < 0);
+        assertEquals(veryLarge, UnsignedIntUtils.toUint32(veryLargeInt));
+    }
+
+    @Test
+    public void testToUint16()
+    {
+        final int veryLarge = 40000;
+        final short veryLargeShort = UnsignedIntUtils.toInt16(veryLarge);
+        assertTrue(veryLargeShort < 0);
+        assertEquals(veryLarge, UnsignedIntUtils.toUint16(veryLargeShort));
+    }
+
+    @Test
+    public void testToUint8()
+    {
+        final short veryLarge = 199;
+        final byte veryLargeByte = UnsignedIntUtils.toInt8(veryLarge);
+        assertTrue(veryLargeByte < 0);
+        assertEquals(veryLarge, UnsignedIntUtils.toUint8(veryLargeByte));
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingStrategyTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingStrategyTest.java
new file mode 100644
index 0000000..0603aed
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/ArchivingStrategyTest.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2012 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertFalse;
+import static org.testng.AssertJUnit.assertTrue;
+
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.hdf5.h5ar.ArchivingStrategy.CompressionStrategy;
+
+/**
+ * Test cases for {@link ArchivingStrategy}.
+ * 
+ * @author Bernd Rinn
+ */
+public class ArchivingStrategyTest
+{
+    @Test
+    public void testCompressDefault()
+    {
+        assertEquals(CompressionStrategy.COMPRESS_NOTHING,
+                ArchivingStrategy.DEFAULT_NO_COMPRESSION.getCompressionStrategy());
+        assertFalse(ArchivingStrategy.DEFAULT_NO_COMPRESSION.doCompress("/test.txt"));
+    }
+
+    @Test
+    public void testCompressDefaultWithCompression()
+    {
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS,
+                ArchivingStrategy.DEFAULT.getCompressionStrategy());
+        assertTrue(ArchivingStrategy.DEFAULT.doCompress("/test.txt"));
+        assertFalse(ArchivingStrategy.DEFAULT.doCompress("/test.txt.gz"));
+        assertFalse(ArchivingStrategy.DEFAULT.doCompress("/test.txt.bz2"));
+        assertFalse(ArchivingStrategy.DEFAULT.doCompress("/test.txt.zip"));
+    }
+
+    @Test
+    public void testCompressAll()
+    {
+        final ArchivingStrategy strategy = new ArchivingStrategy().compressAll();
+        assertEquals(CompressionStrategy.COMPRESS_ALL, strategy.getCompressionStrategy());
+        assertTrue(strategy.doCompress("/test.txt"));
+    }
+
+    @Test
+    public void testCompressBlackList()
+    {
+        final ArchivingStrategy strategyCompressAll =
+                new ArchivingStrategy().compressAll().addToCompressionBlackList(".*\\.txt");
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS,
+                strategyCompressAll.getCompressionStrategy());
+        final ArchivingStrategy strategy =
+                new ArchivingStrategy().compressAll().addToCompressionBlackList(".*\\.txt");
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS, strategy.getCompressionStrategy());
+        assertTrue(strategyCompressAll.doCompress("/test.dat"));
+        assertFalse(strategyCompressAll.doCompress("/test.txt"));
+        assertTrue(strategy.doCompress("/test.dat"));
+        assertFalse(strategy.doCompress("/test.txt"));
+    }
+
+    @Test
+    public void testCompressWhiteList()
+    {
+        final ArchivingStrategy strategyCompressAll =
+                new ArchivingStrategy().compressAll().addToCompressionWhiteList(".*\\.txt");
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS,
+                strategyCompressAll.getCompressionStrategy());
+        final ArchivingStrategy strategy =
+                new ArchivingStrategy().addToCompressionWhiteList(".*\\.txt");
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS, strategy.getCompressionStrategy());
+        assertFalse(strategyCompressAll.doCompress("/test.dat"));
+        assertTrue(strategyCompressAll.doCompress("/test.txt"));
+        assertFalse(strategy.doCompress("/test.dat"));
+        assertTrue(strategy.doCompress("/test.txt"));
+    }
+
+    @Test
+    public void testCompressBlackWhiteList()
+    {
+        final ArchivingStrategy strategyCompressAll =
+                new ArchivingStrategy().compressAll().addToCompressionBlackList(".*a[^/]*\\.txt")
+                        .addToCompressionWhiteList(".*\\.txt");
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS,
+                strategyCompressAll.getCompressionStrategy());
+        final ArchivingStrategy strategy =
+                new ArchivingStrategy().addToCompressionBlackList(".*a[^/]*\\.txt")
+                        .addToCompressionWhiteList(".*\\.txt");
+        assertEquals(CompressionStrategy.USE_BLACK_WHITE_LISTS, strategy.getCompressionStrategy());
+        assertFalse(strategyCompressAll.doCompress("/test.dat"));
+        assertTrue(strategyCompressAll.doCompress("/test.txt"));
+        assertFalse(strategyCompressAll.doCompress("/atest.txt"));
+        assertFalse(strategy.doCompress("/test.dat"));
+        assertTrue(strategy.doCompress("/test.txt"));
+        assertFalse(strategy.doCompress("/atest.txt"));
+    }
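+
+    // A minimal sketch (an assumption about the semantics these tests pin down,
+    // not the shipped implementation) of the USE_BLACK_WHITE_LISTS decision:
+    // the black list always wins; with an empty white list everything else is
+    // compressed; otherwise only white-listed paths are.
+    private static boolean doCompressSketch(String path,
+            java.util.List<java.util.regex.Pattern> blackList,
+            java.util.List<java.util.regex.Pattern> whiteList)
+    {
+        for (java.util.regex.Pattern p : blackList)
+        {
+            if (p.matcher(path).matches())
+            {
+                return false;
+            }
+        }
+        if (whiteList.isEmpty())
+        {
+            return true;
+        }
+        for (java.util.regex.Pattern p : whiteList)
+        {
+            if (p.matcher(path).matches())
+            {
+                return true;
+            }
+        }
+        return false;
+    }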
+
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexUpdaterTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexUpdaterTest.java
new file mode 100644
index 0000000..7f5a6a4
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/DirectoryIndexUpdaterTest.java
@@ -0,0 +1,226 @@
+/*
+ * Copyright 2012 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import static org.testng.AssertJUnit.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeMatcher;
+import org.jmock.Expectations;
+import org.jmock.Mockery;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.exceptions.IErrorStrategy;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+
+/**
+ * Test cases for {@link DirectoryIndexUpdater}.
+ * 
+ * @author Bernd Rinn
+ */
+public class DirectoryIndexUpdaterTest
+{
+    private static final File rootDirectory = new File("targets", "unit-test-wd");
+
+    private static final File workingDirectory = new File(rootDirectory,
+            "hdf5-directory-index-updater-wd");
+
+    private Mockery context;
+
+    IDirectoryIndexProvider provider;
+
+    private DirectoryIndexUpdater updater;
+
+    @BeforeSuite
+    public void init()
+    {
+        workingDirectory.mkdirs();
+        assertTrue(workingDirectory.isDirectory());
+        workingDirectory.deleteOnExit();
+        rootDirectory.deleteOnExit();
+    }
+    
+    @BeforeMethod
+    public void initTest() throws IOException
+    {
+        FileUtils.cleanDirectory(workingDirectory);
+        this.context = new Mockery();
+        this.provider = context.mock(IDirectoryIndexProvider.class);
+        context.checking(new Expectations()
+            {
+                {
+                    one(provider).getErrorStrategy();
+                    will(returnValue(IErrorStrategy.DEFAULT_ERROR_STRATEGY));
+                }
+            });
+
+        this.updater = new DirectoryIndexUpdater(provider);
+    }
+
+    @Override
+    protected void finalize() throws Throwable
+    {
+        // Delete the working directory
+        if (workingDirectory.exists() && workingDirectory.canWrite())
+        {
+            workingDirectory.delete();
+        }
+        // Delete root directory
+        if (rootDirectory.exists() && rootDirectory.canWrite())
+        {
+            rootDirectory.delete();
+        }
+
+        super.finalize();
+    }
+
+    static class LinkEntryEntryMatcher extends TypeSafeMatcher<LinkRecord>
+    {
+        private final FileLinkType type;
+        
+        private final String name;
+
+        private final int crc32;
+
+        private final long lastModified;
+
+        LinkEntryEntryMatcher(FileLinkType type, String name, int crc32, long lastModified)
+        {
+            this.type = type;
+            this.name = name;
+            this.crc32 = crc32;
+            this.lastModified = lastModified;
+        }
+        
+        static LinkEntryEntryMatcher file(String name, int crc32, long lastModified)
+        {
+            return new LinkEntryEntryMatcher(FileLinkType.REGULAR_FILE, name, crc32, lastModified);
+        }
+
+        static LinkEntryEntryMatcher dir(String name, long lastModified)
+        {
+            return new LinkEntryEntryMatcher(FileLinkType.DIRECTORY, name, 0, lastModified);
+        }
+
+        @Override
+        public void describeTo(Description description)
+        {
+            description.appendText(toString());
+        }
+
+        @Override
+        public boolean matchesSafely(LinkRecord item)
+        {
+            if (name.equals(item.getLinkName()) == false)
+            {
+                System.err.printf("linkName=%s (expected: %s)\n", item.getLinkName(), name);
+                return false;
+            }
+            if (type != item.getLinkType())
+            {
+                System.err.printf("linkType=%s (expected: %s) [linkName=%s]\n", item.getLinkType(), type, name);
+                return false;
+            }
+            if (crc32 != item.getCrc32())
+            {
+                System.err.printf("crc32=%s (expected: %s) [linkName=%s]\n", item.getCrc32(), crc32, name);
+                return false;
+            }
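+            // Tolerate up to one second of difference: timestamps are handled at
+            // second resolution, so rounding at the boundary is expected.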
+            if (Math.abs(lastModified - item.getLastModified()) > 1)
+            {
+                System.err.printf("lastModified=%s (expected: %s) [linkName=%s]\n", item.getLastModified(),
+                        lastModified, name);
+                return false;
+            }
+            return true;
+        }
+
+        @Override
+        public String toString()
+        {
+            return "LinkEntryEntryMatcher [type=" + type + ", name=" + name + ", crc32=" + crc32
+                    + ", lastModified=" + lastModified + "]";
+        }
+
+    }
+
+    @Test
+    public void testFileExistsOnFS() throws IOException
+    {
+        final String name = "abc";
+        final int crc32 = 123;
+        final long lastModified = 543L; 
+        final File f = new File(workingDirectory, name);
+        FileUtils.touch(f);
+        f.setLastModified(lastModified * 1000L);
+        context.checking(new Expectations()
+            {
+                {
+                    final IDirectoryIndex indexBlaBlub  = context.mock(IDirectoryIndex.class, "indexBlaBlub");
+                    one(provider).get("/bla/blub", false);
+                    will(returnValue(indexBlaBlub));
+                    one(indexBlaBlub).updateIndex(with(LinkEntryEntryMatcher.file(name, crc32, lastModified)));
+
+                    final IDirectoryIndex indexBla  = context.mock(IDirectoryIndex.class, "indexBla");
+                    one(provider).get("/bla", false);
+                    will(returnValue(indexBla));
+                    one(indexBla).updateIndex(with(LinkEntryEntryMatcher.dir("blub", System.currentTimeMillis()/1000)));
+
+                    final IDirectoryIndex indexRoot  = context.mock(IDirectoryIndex.class, "indexRoot");
+                    one(provider).get("/", false);
+                    will(returnValue(indexRoot));
+                    one(indexRoot).updateIndex(with(LinkEntryEntryMatcher.dir("bla", System.currentTimeMillis()/1000)));
+                }
+            });
+        updater.updateIndicesOnThePath("/bla/blub", f, 123, false);
+    }
+
+    @Test
+    public void testOnlyFileAndDirExistsOnFS() throws IOException
+    {
+        final String name = "abc";
+        final int crc32 = 123;
+        final long lastModified = 543L; 
+        final File f = new File(new File(workingDirectory, "ttt"), name);
+        f.getParentFile().mkdirs();
+        FileUtils.touch(f);
+        f.setLastModified(lastModified * 1000L);
+        final long lastModifiedDir = 2222L; 
+        f.getParentFile().setLastModified(lastModifiedDir * 1000L);
+        context.checking(new Expectations()
+            {
+                {
+                    final IDirectoryIndex indexTtt  = context.mock(IDirectoryIndex.class, "indexTtt");
+                    one(provider).get("/ttt", false);
+                    will(returnValue(indexTtt));
+                    one(indexTtt).updateIndex(with(LinkEntryEntryMatcher.file(name, crc32, lastModified)));
+
+                    final IDirectoryIndex indexRoot  = context.mock(IDirectoryIndex.class, "indexRoot");
+                    one(provider).get("/", false);
+                    will(returnValue(indexRoot));
+                    one(indexRoot).updateIndex(with(LinkEntryEntryMatcher.dir("ttt", lastModifiedDir)));
+                }
+            });
+        updater.updateIndicesOnThePath("/ttt", f, 123, false);
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverTest.java
new file mode 100644
index 0000000..0c02cf7
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/HDF5ArchiverTest.java
@@ -0,0 +1,949 @@
+/*
+ * Copyright 2009 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertFalse;
+import static org.testng.AssertJUnit.assertNotNull;
+import static org.testng.AssertJUnit.assertNull;
+import static org.testng.AssertJUnit.assertTrue;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.ArrayUtils;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.Test;
+
+import ch.rinn.restrictions.Friend;
+import ch.systemsx.cisd.base.unix.FileLinkType;
+import ch.systemsx.cisd.base.unix.Unix;
+import ch.systemsx.cisd.base.unix.Unix.Stat;
+import ch.systemsx.cisd.base.utilities.OSUtilities;
+import ch.systemsx.cisd.hdf5.HDF5Factory;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+
+/**
+ * Tests for {@link HDF5Archiver}.
+ * 
+ * @author Bernd Rinn
+ */
+@Friend(toClasses =
+    { HDF5Archiver.class, IdCache.class, LinkRecord.class })
+public class HDF5ArchiverTest
+{
+    private static final File rootDirectory = new File("targets", "unit-test-wd");
+
+    private static final File workingDirectory = new File(rootDirectory, "hdf5-archivertest-wd");
+
+    private static final File srcDirectory = new File("sourceTest/java/ch/systemsx/cisd/hdf5/h5ar");
+
+    @BeforeSuite
+    public void init()
+    {
+        workingDirectory.mkdirs();
+        assertTrue(workingDirectory.isDirectory());
+        workingDirectory.deleteOnExit();
+        rootDirectory.deleteOnExit();
+    }
+
+    @AfterTest
+    public void cleanup()
+    {
+        deleteAll(workingDirectory);
+    }
+
+    private void deleteAll(File path)
+    {
+        if (path.isDirectory())
+        {
+            for (File sub : path.listFiles())
+            {
+                deleteAll(sub);
+            }
+        }
+        path.delete();
+    }
+
+    @Override
+    protected void finalize() throws Throwable
+    {
+        // Delete the working directory
+        if (workingDirectory.exists() && workingDirectory.canWrite())
+        {
+            workingDirectory.delete();
+        }
+        // Delete root directory
+        if (rootDirectory.exists() && rootDirectory.canWrite())
+        {
+            rootDirectory.delete();
+        }
+
+        super.finalize();
+    }
+
+    @Test
+    public void testGetPermissions()
+    {
+        assertEquals("----------", Utils.permissionsToString(0, false, false));
+        assertEquals("-rw-rw-rw-", Utils.permissionsToString(0666, false, false));
+        assertEquals("-r--r--r--", Utils.permissionsToString(0444, false, false));
+        assertEquals("-rwx------", Utils.permissionsToString(0700, false, false));
+        assertEquals("-rwsr-xr-x", Utils.permissionsToString(04755, false, false));
+        assertEquals("-rwSr-xr-x", Utils.permissionsToString(04655, false, false));
+        assertEquals("-rwxr-sr-x", Utils.permissionsToString(02755, false, false));
+        assertEquals("-rwxr-Sr-x", Utils.permissionsToString(02745, false, false));
+        assertEquals("-rwxr-xr-t", Utils.permissionsToString(01755, false, false));
+        assertEquals("-rwxr-xr-T", Utils.permissionsToString(01754, false, false));
+        assertEquals("d---------", Utils.permissionsToString(0, true, false));
+        assertEquals("drwxr-xr-x", Utils.permissionsToString(0755, true, false));
+    }
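+
+    // A sketch under assumptions (illustration only, not Utils' actual code) of
+    // the ls(1)-style rendering asserted above: one type character followed by
+    // three rwx triplets, with the setuid/setgid/sticky bits folding into the
+    // respective execute columns as s/S or t/T.
+    private static String permissionsToStringSketch(int mode, boolean isDir)
+    {
+        final StringBuilder b = new StringBuilder(isDir ? "d" : "-");
+        final String rwx = "rwxrwxrwx";
+        for (int i = 0; i < 9; ++i)
+        {
+            b.append((mode & (0400 >> i)) != 0 ? rwx.charAt(i) : '-');
+        }
+        if ((mode & 04000) != 0) // setuid
+        {
+            b.setCharAt(3, (mode & 0100) != 0 ? 's' : 'S');
+        }
+        if ((mode & 02000) != 0) // setgid
+        {
+            b.setCharAt(6, (mode & 0010) != 0 ? 's' : 'S');
+        }
+        if ((mode & 01000) != 0) // sticky
+        {
+            b.setCharAt(9, (mode & 0001) != 0 ? 't' : 'T');
+        }
+        return b.toString();
+    }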
+
+    @Test(groups =
+        { "requires_unix" })
+    public void testDescribeLink()
+    {
+        final String rootGroupName = OSUtilities.isMacOS() ? "wheel" : "root";
+        final IdCache idCache = new IdCache();
+        assertEquals("dir/link_name", new ArchiveEntry("dir", "dir/link_name", new LinkRecord(null,
+                null, null, -1, -1, -1, -1, (short) -1, 0), idCache).describeLink(false, false));
+        assertEquals("       100\t00000000\tdir/link_name", new ArchiveEntry("dir",
+                "dir/link_name", new LinkRecord(null, null, FileLinkType.REGULAR_FILE, 100, -1, -1,
+                        -1, (short) -1, 0), idCache).describeLink(true, false));
+        assertEquals("-rwxr-xr-x\troot\t" + rootGroupName
+                + "\t       111\t2000-01-01 00:00:00\t00000000\tdir/link_name", new ArchiveEntry(
+                "dir", "dir/link_name", new LinkRecord(null, null, FileLinkType.REGULAR_FILE, 111L,
+                        946681200491L / 1000L, 0, 0, (short) 0755, 0), idCache).describeLink(true,
+                false));
+        assertEquals("d---------\troot\t" + rootGroupName
+                + "\t       DIR\t2000-01-01 00:00:00\t        \tdir/link_name", new ArchiveEntry(
+                "dir", "dir/link_name", new LinkRecord(null, null, FileLinkType.DIRECTORY, 111L,
+                        946681200491L / 1000L, 0, 0, (short) 0, 0), idCache).describeLink(true,
+                false));
+        assertEquals("755\t0\t0\t       111\t2000-01-01 00:00:00\t" + Utils.crc32ToString(200)
+                + "\tdir/link_name", new ArchiveEntry("dir", "dir/link_name", new LinkRecord(null,
+                null, FileLinkType.REGULAR_FILE, 111L, 946681200491L / 1000L, 0, 0, (short) 0755,
+                200), idCache).describeLink(true, true));
+        assertEquals("0\t0\t0\t       DIR\t2000-01-01 00:00:00\t        \tdir/link_name",
+                new ArchiveEntry("dir", "dir/link_name", new LinkRecord("link_name2", null,
+                        FileLinkType.DIRECTORY, 111L, 946681200491L / 1000L, 0, 0, (short) 0, 0),
+                        idCache).describeLink(true, true));
+        assertEquals("       111\t2000-01-01 00:00:00\t00000000\tdir/link_name", new ArchiveEntry(
+                "dir", "dir/link_name", new LinkRecord("link_name", null,
+                        FileLinkType.REGULAR_FILE, 111L, 946681200491L / 1000L, -1, 0,
+                        (short) 0755, 0), idCache).describeLink(true, false));
+        assertEquals("       111\t00000000\tdir/link_name", new ArchiveEntry("dir",
+                "dir/link_name", new LinkRecord("link_name2", null, FileLinkType.REGULAR_FILE,
+                        111L, -1L, -1, 0, (short) 0755, 0), idCache).describeLink(true, false));
+    }
+
+    @Test(groups =
+        { "requires_unix" })
+    public void testIdCache()
+    {
+        if (Unix.isOperational() == false)
+        {
+            return;
+        }
+        final int uid = Unix.getUid();
+        final String uname = Unix.tryGetUserNameForUid(uid);
+        final IdCache idCache = new IdCache();
+        assertEquals("-17", idCache.getUser(-17, true));
+        assertEquals("root", idCache.getUser(0, false));
+        assertEquals(uname, idCache.getUser(uid, false));
+        Integer invalidUid = getInvalidUid();
+        if (invalidUid != null)
+        {
+            assertEquals(Integer.toString(invalidUid), idCache.getUser(invalidUid, false));
+        }
+    }
+
+    private Integer getInvalidUid()
+    {
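+        // Scan upward from uid 60000 for an id that has no user entry; return
+        // null if every id up to 65535 is taken.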
+        int invalidUid;
+        for (invalidUid = 60000; invalidUid < 65535 && Unix.tryGetUserByUid(invalidUid) != null; ++invalidUid)
+        {
+        }
+        return Unix.tryGetUserByUid(invalidUid) == null ? invalidUid : null;
+    }
+
+    private void writeToArchive(final IHDF5Archiver a, final String name, final String content)
+    {
+        final byte[] bytes = content.getBytes();
+        a.archiveFile(NewArchiveEntry.file("/test", name).lastModified(1000000L).uid(100).gid(100),
+                new ByteArrayInputStream(bytes));
+    }
+
+    @Test
+    public void testWriteByteArrayToArchive()
+    {
+        final File file = new File(workingDirectory, "writeByteArrayToArchive.h5ar");
+        file.delete();
+        file.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(file);
+        writeToArchive(a, "hello.txt", "Hello World\n");
+        writeToArchive(a, "hello2.txt", "Yet another Hello World\n");
+        a.close();
+        final IHDF5ArchiveReader aro = HDF5ArchiverFactory.openForReading(file);
+        final String content1 = new String(aro.extractFileAsByteArray("/test/hello.txt"));
+        assertEquals("Hello World\n", content1);
+        final String content2 = new String(aro.extractFileAsByteArray("/test/hello2.txt"));
+        assertEquals("Yet another Hello World\n", content2);
+        final List<ArchiveEntry> list =
+                aro.list("/", ListParameters.build().nonRecursive().noReadLinkTarget().get());
+        assertEquals(1, list.size());
+        assertEquals("755\t100\t100\t       DIR\t1970-01-12 14:46:40\t        \t/test", list.get(0)
+                .describeLink(true, true));
+        final List<ArchiveEntry> list2 =
+                aro.list("/test", ListParameters.build().testArchive().suppressDirectoryEntries()
+                        .get());
+        assertEquals(2, list2.size());
+        assertEquals(
+                "755\t100\t100\t        12\t1970-01-12 14:46:40\tb095e5e3\t/test/hello.txt\tOK",
+                list2.get(0).describeLink(true, true));
+        assertEquals(
+                "755\t100\t100\t        24\t1970-01-12 14:46:40\tee5f3107\t/test/hello2.txt\tOK",
+                list2.get(1).describeLink(true, true));
+        aro.close();
+    }
+
+    @Test
+    public void testWriteFileAsOutputStream() throws Exception
+    {
+        final File file = new File(workingDirectory, "writeFileAsOutputStream.h5ar");
+        file.delete();
+        file.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(file);
+        final PrintStream ps =
+                new PrintStream(a.archiveFileAsOutputStream(NewArchiveEntry.file("test1")
+                        .chunkSize(128)));
+        ps.printf("Some %s stuff: %d\n", "more", 17);
+        // Note: we don't close the PrintStream or the underlying OutputStream
+        // explicitly; the flushables ensure that everything gets written out anyway.
+        a.close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(file);
+        final List<ArchiveEntry> entries = ar.list("", ListParameters.build().testArchive().get());
+        assertEquals(1, entries.size());
+        assertEquals("test1", entries.get(0).getName());
+        assertTrue(entries.get(0).isOK());
+        final BufferedReader r =
+                new BufferedReader(new InputStreamReader(ar.extractFileAsInputStream("test1")));
+        assertEquals("Some more stuff: 17", r.readLine());
+        assertNull(r.readLine());
+        ar.close();
+    }
+
+    private File createTestDirectory() throws IOException
+    {
+        return createTestDirectory(null, System.currentTimeMillis());
+    }
+
+    private File createTestDirectory(String prefixOrNull, long time) throws IOException
+    {
+        final File prefixDir =
+                (prefixOrNull != null) ? new File(workingDirectory, prefixOrNull)
+                        : workingDirectory;
+        prefixDir.delete();
+        prefixDir.deleteOnExit();
+        final File dir = new File(prefixDir, "test");
+        dir.delete();
+        dir.deleteOnExit();
+        dir.mkdirs();
+        final File f1 = new File(dir, "file_test1.txt");
+        f1.delete();
+        f1.deleteOnExit();
+        FileUtils.writeLines(f1, Arrays.asList("Line 1", "Line 2", "Line 3"));
+        f1.setLastModified(time);
+        final File dir2 = new File(dir, "dir_somedir");
+        dir2.delete();
+        dir2.mkdir();
+        dir2.deleteOnExit();
+        final File f2 = new File(dir2, "file_test2.txt");
+        f2.delete();
+        f2.deleteOnExit();
+        FileUtils.writeLines(f2, Arrays.asList("A", "B", "C"));
+        f2.setLastModified(time);
+        final File dir3 = new File(dir, "dir_someotherdir");
+        dir3.delete();
+        dir3.mkdir();
+        dir3.deleteOnExit();
+        if (Unix.isOperational())
+        {
+            final File l1 = new File(dir2, "link_todir3");
+            l1.delete();
+            l1.deleteOnExit();
+            Unix.createSymbolicLink("../" + dir3.getName(), l1.getAbsolutePath());
+        }
+        dir2.setLastModified(time);
+        dir3.setLastModified(time);
+        dir.setLastModified(time);
+        return dir;
+    }
+
+    @Test
+    public void testCreateVerifyRoundtripOK() throws IOException
+    {
+        final File dir = createTestDirectory();
+        final File h5arfile = new File(workingDirectory, "testRoundtrip.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        HDF5ArchiverFactory.open(h5arfile).archiveFromFilesystem(dir).close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        assertTrue(ar.test().isEmpty());
+        assertTrue(ar.verifyAgainstFilesystem(dir).isEmpty());
+        ar.close();
+    }
+
+    @Test
+    public void testCreateVerifyContentArtificialRootRoundtripOK() throws IOException
+    {
+        final File dir = createTestDirectory();
+        final File h5arfile = new File(workingDirectory, "testRoundtripContentArtificialRoot.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        HDF5ArchiverFactory.open(h5arfile).archiveFromFilesystemBelowDirectory("ttt", dir).close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        assertTrue(ar.test().isEmpty());
+        assertTrue(ar.verifyAgainstFilesystem("", dir, "ttt").isEmpty());
+        ar.close();
+    }
+
+    @Test
+    public void testRoundtrip() throws IOException
+    {
+        final long now = System.currentTimeMillis();
+        final long dirLastChanged = now - 1000L * 3600L * 24 * 5;
+        final File dir = createTestDirectory("original", dirLastChanged);
+        final long dirLastChangedSeconds = dirLastChanged / 1000L;
+        final File h5arfile = new File(workingDirectory, "testRoundtrip.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final AtomicInteger entryCount = new AtomicInteger(0);
+        HDF5ArchiverFactory.open(h5arfile)
+                .archiveFromFilesystemBelowDirectory("/", dir, new IArchiveEntryVisitor()
+                    {
+                        @Override
+                        public void visit(ArchiveEntry entry)
+                        {
+                            entryCount.incrementAndGet();
+                            final File f = new File(dir, entry.getPath());
+                            assertTrue(entry.getPath(), f.exists());
+                            assertTrue(entry.isOK());
+                            if (entry.isSymLink() == false)
+                            {
+                                assertEquals(dirLastChangedSeconds, entry.getLastModified());
+                                assertEquals(entry.getPath(), f.isDirectory(), entry.isDirectory());
+                                assertEquals(entry.getPath(), f.isFile(), entry.isRegularFile());
+                                if (entry.isRegularFile())
+                                {
+                                    assertEquals(entry.getPath(), f.length(), entry.getSize());
+                                }
+                            }
+                        }
+                    }).close();
+        assertEquals(6, entryCount.intValue());
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        entryCount.set(0);
+        ar.list("/", new IArchiveEntryVisitor()
+            {
+                @Override
+                public void visit(ArchiveEntry entry)
+                {
+                    entryCount.incrementAndGet();
+                    final File f = new File(dir, entry.getPath());
+                    assertTrue(entry.getPath(), f.exists());
+                    assertTrue(entry.isOK());
+                    if (entry.isSymLink() == false)
+                    {
+                        assertEquals(dirLastChangedSeconds, entry.getLastModified());
+                        assertEquals(entry.getPath(), f.isDirectory(), entry.isDirectory());
+                        assertEquals(entry.getPath(), f.isFile(), entry.isRegularFile());
+                        if (entry.isRegularFile())
+                        {
+                            assertEquals(entry.getPath(), f.length(), entry.getSize());
+                        }
+                    }
+                }
+            });
+        assertEquals(5, entryCount.intValue());
+        assertTrue(ar.verifyAgainstFilesystem(dir).isEmpty());
+        final File extracted = new File(dir.getParentFile().getParentFile(), "extracted");
+        deleteAll(extracted);
+        entryCount.set(0);
+        ar.extractToFilesystem(extracted, "/", new IArchiveEntryVisitor()
+            {
+                @Override
+                public void visit(ArchiveEntry entry)
+                {
+                    entryCount.incrementAndGet();
+                    final File f = new File(dir, entry.getPath());
+                    assertTrue(entry.getPath(), f.exists());
+                    assertTrue(entry.isOK());
+                    if (entry.isSymLink() == false)
+                    {
+                        assertEquals(dirLastChangedSeconds, entry.getLastModified());
+                        assertEquals(entry.getPath(), f.isDirectory(), entry.isDirectory());
+                        assertEquals(entry.getPath(), f.isFile(), entry.isRegularFile());
+                        if (entry.isRegularFile())
+                        {
+                            assertEquals(entry.getPath(), f.length(), entry.getSize());
+                        }
+                    }
+                }
+            });
+        assertEquals(5, entryCount.get());
+        assertTrue(ar.verifyAgainstFilesystem(extracted).isEmpty());
+        entryCount.set(0);
+        checkDirectoryEntries(dir, extracted, entryCount);
+        assertEquals(5, entryCount.intValue());
+        final File partiallyExtracted =
+                new File(dir.getParentFile().getParentFile(), "partiallyExtracted");
+        deleteAll(partiallyExtracted);
+        entryCount.set(0);
+        final String[] pathsInDirSomedir = new String[]
+            { "/dir_somedir", "/dir_somedir/file_test2.txt", "/dir_somedir/link_todir3" };
+        ar.extractToFilesystemBelowDirectory(partiallyExtracted, "/dir_somedir",
+                new IArchiveEntryVisitor()
+                    {
+                        @Override
+                        public void visit(ArchiveEntry entry)
+                        {
+                            int idx = entryCount.getAndIncrement();
+                            assertEquals(pathsInDirSomedir[idx], entry.getPath());
+                        }
+                    });
+        assertEquals(3, entryCount.get());
+        ar.close();
+    }
+
+    private void checkDirectoryEntries(final File dir, final File extracted,
+            final AtomicInteger entryCount)
+    {
+        for (File f : extracted.listFiles())
+        {
+            entryCount.incrementAndGet();
+            final String relativePath =
+                    f.getAbsolutePath().substring(extracted.getAbsolutePath().length() + 1);
+            final File orig = new File(dir, relativePath);
+            assertTrue(relativePath, orig.exists());
+            assertEquals(relativePath, orig.isDirectory(), f.isDirectory());
+            assertEquals(relativePath, orig.isFile(), f.isFile());
+            if (Unix.isOperational())
+            {
+                final Stat fStat = Unix.getLinkInfo(f.getPath(), true);
+                final Stat origStat = Unix.getLinkInfo(orig.getPath(), true);
+                assertEquals(relativePath, origStat.isSymbolicLink(), fStat.isSymbolicLink());
+                assertEquals(relativePath, origStat.tryGetSymbolicLink(),
+                        fStat.tryGetSymbolicLink());
+            }
+            if (f.isDirectory())
+            {
+                checkDirectoryEntries(orig, f, entryCount);
+            }
+        }
+    }
+
+    @Test
+    public void testRoundtripArtificalRootOK() throws IOException
+    {
+        final File dir = createTestDirectory();
+        final File h5arfile = new File(workingDirectory, "testRoundtripArtificalRootOK.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        HDF5ArchiverFactory.open(h5arfile).archiveFromFilesystem("ttt", dir).close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        final List<ArchiveEntry> list = ar.list("/");
+        assertEquals(7, list.size());
+        assertEquals("/ttt", list.get(0).getPath());
+        assertEquals("/ttt/test", list.get(1).getPath());
+        assertEquals("/ttt/test/dir_somedir", list.get(2).getPath());
+        assertEquals("/ttt/test/dir_somedir/file_test2.txt", list.get(3).getPath());
+        assertEquals("/ttt/test/dir_somedir/link_todir3", list.get(4).getPath());
+        assertEquals("/ttt/test/dir_someotherdir", list.get(5).getPath());
+        assertEquals("/ttt/test/file_test1.txt", list.get(6).getPath());
+        assertEquals("Line 1\nLine 2\nLine 3\n",
+                new String(ar.extractFileAsByteArray("/ttt/test/file_test1.txt")));
+        assertEquals("A\nB\nC\n",
+                new String(ar.extractFileAsByteArray("/ttt/test/dir_somedir/file_test2.txt")));
+        assertTrue(ar.test().isEmpty());
+        List<ArchiveEntry> verifyErrors =
+                ar.verifyAgainstFilesystem("/", dir.getParentFile(), "/ttt");
+        assertTrue(verifyErrors.toString(), verifyErrors.isEmpty());
+
+        final List<ArchiveEntry> list2 = ar.list("/ttt/test/dir_somedir");
+        assertEquals(2, list2.size());
+        assertEquals("file_test2.txt", list2.get(0).getName());
+        assertEquals("link_todir3", list2.get(1).getName());
+
+        final List<ArchiveEntry> list3 =
+                ar.list("/ttt/test/dir_somedir", ListParameters.build()
+                        .includeTopLevelDirectoryEntry().get());
+        assertEquals(3, list3.size());
+        assertEquals("dir_somedir", list3.get(0).getName());
+        assertEquals("file_test2.txt", list3.get(1).getName());
+        assertEquals("link_todir3", list3.get(2).getName());
+
+        ar.close();
+    }
+
+    @Test
+    public void testRoundtripArtificalRootWhichExistsOnFSOK() throws IOException
+    {
+        final long now = System.currentTimeMillis();
+        final long dirLastChanged = now - 1000L * 3600L * 24 * 3;
+        final File dir = createTestDirectory("ttt", dirLastChanged);
+        // Set some special last modified time and access mode that we can recognize
+        dir.getParentFile().setLastModified(111000L);
+        Unix.setAccessMode(dir.getParent(), (short) 0777);
+        final File h5arfile =
+                new File(workingDirectory, "testRoundtripArtificalRootWhichExistsOnFSOK.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        HDF5ArchiverFactory.open(h5arfile).archiveFromFilesystem("ttt", dir).close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        final List<ArchiveEntry> list = ar.list("/");
+        assertEquals(7, list.size());
+        assertEquals("/ttt", list.get(0).getPath());
+        // Does the archive entry have the last modified time and access mode we have set in the
+        // filesystem?
+        assertEquals(111, list.get(0).getLastModified());
+        assertEquals((short) 0777, list.get(0).getPermissions());
+        assertEquals("/ttt/test", list.get(1).getPath());
+        assertEquals("/ttt/test/dir_somedir", list.get(2).getPath());
+        assertEquals("/ttt/test/dir_somedir/file_test2.txt", list.get(3).getPath());
+        assertEquals("/ttt/test/dir_somedir/link_todir3", list.get(4).getPath());
+        assertEquals("/ttt/test/dir_someotherdir", list.get(5).getPath());
+        assertEquals("/ttt/test/file_test1.txt", list.get(6).getPath());
+        assertEquals("Line 1\nLine 2\nLine 3\n",
+                new String(ar.extractFileAsByteArray("/ttt/test/file_test1.txt")));
+        assertEquals("A\nB\nC\n",
+                new String(ar.extractFileAsByteArray("/ttt/test/dir_somedir/file_test2.txt")));
+        assertTrue(ar.test().isEmpty());
+        List<ArchiveEntry> verifyErrors =
+                ar.verifyAgainstFilesystem("/", dir.getParentFile(), "/ttt");
+        assertTrue(verifyErrors.toString(), verifyErrors.isEmpty());
+        ar.close();
+    }
+
+    @Test
+    public void testGetInfo() throws IOException
+    {
+        final File dir = createTestDirectory();
+        final File h5arfile = new File(workingDirectory, "testGetInfo.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        HDF5ArchiverFactory.open(h5arfile).archiveFromFilesystem(dir).close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        assertTrue(ar.exists("file_test1.txt"));
+        assertTrue(ar.isRegularFile("file_test1.txt"));
+        assertFalse(ar.isDirectory("file_test1.txt"));
+        assertFalse(ar.isSymLink("file_test1.txt"));
+        assertTrue(ar.exists("dir_somedir"));
+        assertFalse(ar.isRegularFile("dir_somedir"));
+        assertFalse(ar.isSymLink("dir_somedir"));
+        assertTrue(ar.isDirectory("dir_somedir"));
+        assertTrue(ar.exists("dir_somedir/link_todir3"));
+        assertFalse(ar.isRegularFile("dir_somedir/link_todir3"));
+        assertFalse(ar.isDirectory("dir_somedir/link_todir3"));
+        assertTrue(ar.isSymLink("dir_somedir/link_todir3"));
+        assertEquals("../dir_someotherdir", ar.tryGetEntry("dir_somedir/link_todir3", true)
+                .getLinkTarget());
+        ar.close();
+    }
+    
+    private static final String ARCHIVE_LISTING = "775\t1001\t1001\t       DIR\t2015-02-21 14:01:31\t        \t/tmp\n"
+            + "775\t1001\t1001\t       DIR\t2015-02-21 14:01:40\t        \t/tmp/c\n"
+            + "664\t1001\t1001\t         7\t2015-02-21 14:01:40\t046d0418\t/tmp/c/d\n"
+            + "664\t1001\t1001\t         7\t2015-02-21 14:01:21\t791af05d\t/tmp/a\n"
+            + "664\t1001\t1001\t         7\t2015-02-21 14:01:27\t5237a39e\t/tmp/b";
+    
+    @Test
+    public void testReadExistingArchive()
+    {
+        final File h5arfile = new File(srcDirectory, "test.h5ar");
+        System.out.println(h5arfile);
+        assertTrue(h5arfile.exists());
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        assertEquals(ARCHIVE_LISTING, toStringNumeric(ar));
+        final List<ArchiveEntry> testResult = ar.test();
+        assertTrue(ArrayUtils.toString(testResult), testResult.isEmpty());
+        ar.close();
+    }
+
+    @Test
+    public void testReadLegacy_14_12_0_Archive()
+    {
+        final File h5arfile = new File(srcDirectory, "test_14_12_0.h5ar");
+        System.out.println(h5arfile);
+        assertTrue(h5arfile.exists());
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        assertEquals(ARCHIVE_LISTING, toStringNumeric(ar));
+        final List<ArchiveEntry> testResult = ar.test();
+        assertTrue(ArrayUtils.toString(testResult), testResult.isEmpty());
+        ar.close();
+    }
+
+    private String toStringNumeric(final IHDF5ArchiveReader ar)
+    {
+        final StringBuilder b = new StringBuilder();
+        for (ArchiveEntry e : ar.list())
+        {
+            b.append(e.describeLink(true, true));
+            b.append('\n');
+        }
+        b.setLength(b.length() - 1);
+        return b.toString();
+    }
+
+    private void checkSorted(List<ArchiveEntry> entries)
+    {
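+        // Directories must come first, followed by the remaining entries; within
+        // each contiguous run the names must be strictly ascending (the comparison
+        // is skipped only at the directory/non-directory boundary).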
+        boolean dirs = true;
+        for (int i = 1; i < entries.size(); ++i)
+        {
+            if (dirs && entries.get(i).isDirectory() == false)
+            {
+                dirs = false;
+            } else
+            {
+                assertTrue(entries.get(i - 1).getName().compareTo(entries.get(i).getName()) < 0);
+            }
+        }
+    }
+
+    @Test
+    public void testManyFiles()
+    {
+        workingDirectory.mkdirs();
+        final File h5arfile = new File(workingDirectory, "testManyFiles.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(h5arfile);
+        for (int i = 999; i >= 0; --i)
+        {
+            a.archiveFile(Integer.toString(i), new byte[0]);
+        }
+        a.archiveSymlink("symlink", "500");
+        a.archiveDirectory(NewArchiveEntry.directory("/dir"));
+        a.archiveFile("dir/hello", "hello world".getBytes());
+        final List<ArchiveEntry> entries = a.list("/");
+        assertEquals(1003, entries.size());
+        final ArchiveEntry symLinkEntry = a.tryGetEntry("symlink", true);
+        assertNotNull(symLinkEntry);
+        assertTrue(symLinkEntry.isSymLink());
+        assertTrue(symLinkEntry.hasLinkTarget());
+        assertEquals("500", symLinkEntry.getLinkTarget());
+        final ArchiveEntry dirEntry = a.tryGetEntry("dir", true);
+        assertNotNull(dirEntry);
+        assertTrue(dirEntry.isDirectory());
+        assertFalse(dirEntry.isRegularFile());
+        assertFalse(dirEntry.isSymLink());
+
+        final List<ArchiveEntry> entriesDir = a.list("/dir");
+        assertEquals(1, entriesDir.size());
+        assertEquals("hello", entriesDir.get(0).getName());
+        a.close();
+        final IHDF5ArchiveReader ra = HDF5ArchiverFactory.openForReading(h5arfile);
+        final List<ArchiveEntry> entriesRead =
+                ra.list("/", ListParameters.build().nonRecursive().get());
+        assertEquals(1002, entriesRead.size());
+        checkSorted(entriesRead);
+        for (int i = 1; i < entriesRead.size() - 1; ++i)
+        {
+            assertTrue(entriesRead.get(i).isRegularFile());
+        }
+        assertTrue(entriesRead.get(0).isDirectory());
+        assertTrue(entriesRead.get(entriesRead.size() - 1).isSymLink());
+        for (int i = 1; i < 1001; ++i)
+        {
+            assertTrue(ra.isRegularFile(Integer.toString(i - 1)));
+            assertFalse(ra.isDirectory(Integer.toString(i - 1)));
+            assertFalse(ra.isSymLink(Integer.toString(i - 1)));
+        }
+        assertTrue(ra.isSymLink("symlink"));
+        assertFalse(ra.isDirectory("symlink"));
+        assertFalse(ra.isRegularFile("symlink"));
+        assertEquals("500", ra.tryGetEntry("symlink", true).getLinkTarget());
+        assertTrue(ra.isDirectory("dir"));
+        assertFalse(ra.isSymLink("dir"));
+        assertFalse(ra.isRegularFile("dir"));
+        ra.close();
+    }
+
+    @Test
+    public void testFollowSymbolicLinks()
+    {
+        workingDirectory.mkdirs();
+        final File h5arfile = new File(workingDirectory, "testFollowSymbolicLinks.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(h5arfile);
+        a.archiveDirectory(NewArchiveEntry.directory("aDir"));
+        a.archiveFile(NewArchiveEntry.file("aDir/aFile"), "Some file content".getBytes());
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToAFile", "aDir/aFile"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToADir", "aDir"));
+        a.close();
+
+        final IHDF5ArchiveReader ra = HDF5ArchiverFactory.openForReading(h5arfile);
+        // Deliberately without resolveSymbolicLinks() (cf. the next test).
+        final List<ArchiveEntry> entries =
+                ra.list("/", ListParameters.build().followSymbolicLinks().get());
+
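+        // The link targets are traversed ("/aLinkToADir/aFile" shows up below), but
+        // the link entries themselves still report as symlinks.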
+        assertEquals(5, entries.size());
+        assertEquals("/aDir", entries.get(0).getPath());
+        assertTrue(entries.get(0).isDirectory());
+        assertEquals("/aDir/aFile", entries.get(1).getPath());
+        assertTrue(entries.get(1).isRegularFile());
+        assertEquals("/aLinkToADir", entries.get(2).getPath());
+        assertTrue(entries.get(2).isSymLink());
+        assertEquals("/aLinkToADir/aFile", entries.get(3).getPath());
+        assertTrue(entries.get(3).isRegularFile());
+        assertEquals("/aLinkToAFile", entries.get(4).getPath());
+        assertTrue(entries.get(4).isSymLink());
+
+        ra.close();
+    }
+
+    @Test
+    public void testFollowAndResolveSymbolicLinks()
+    {
+        workingDirectory.mkdirs();
+        final File h5arfile = new File(workingDirectory, "testFollowAndResolveSymbolicLinks.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(h5arfile);
+        a.archiveDirectory(NewArchiveEntry.directory("aDir"));
+        a.archiveFile(NewArchiveEntry.file("aDir/aFile"), "Some file content".getBytes());
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToAFile", "aDir/aFile"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToADir", "aDir"));
+        a.close();
+
+        final IHDF5ArchiveReader ra = HDF5ArchiverFactory.openForReading(h5arfile);
+
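+        // With resolveSymbolicLinks() in addition, link entries take on the type and
+        // real path of their targets instead of reporting as symlinks.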
+        final List<ArchiveEntry> entries =
+                ra.list("/", ListParameters.build().resolveSymbolicLinks().followSymbolicLinks()
+                        .get());
+
+        assertEquals(5, entries.size());
+        assertEquals("/aDir", entries.get(0).getPath());
+        assertTrue(entries.get(0).isDirectory());
+        assertEquals("/aDir/aFile", entries.get(1).getPath());
+        assertTrue(entries.get(1).isRegularFile());
+        assertEquals("/aLinkToADir", entries.get(2).getPath());
+        assertEquals("/aDir", entries.get(2).getRealPath());
+        assertTrue(entries.get(2).isDirectory());
+        assertEquals("/aLinkToADir/aFile", entries.get(3).getPath());
+        assertTrue(entries.get(3).isRegularFile());
+        assertEquals("/aLinkToAFile", entries.get(4).getPath());
+        assertEquals("/aDir/aFile", entries.get(4).getRealPath());
+        assertTrue(entries.get(4).isRegularFile());
+
+        ra.close();
+    }
+
+    @Test
+    public void testResolveLinks()
+    {
+        workingDirectory.mkdirs();
+        final File h5arfile = new File(workingDirectory, "testResolveLinks.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(h5arfile);
+        a.archiveFile(NewArchiveEntry.file("aFile"), "Some file content".getBytes());
+        a.archiveDirectory(NewArchiveEntry.directory("aDir"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToAFile", "aFile"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToADir", "aDir"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aNonsenseLink", "../outOfFS"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aLinkToANonexistingFile", "nonexistingFile"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aDir/aLinkToALinkToAFile", "../aLinkToAFile"));
+        a.archiveSymlink(NewArchiveEntry.symlink("aDir/aLinkToALinkToADir", "/aLinkToADir"));
+
+        // A loop
+        a.archiveDirectory(NewArchiveEntry.directory("z"));
+        a.archiveSymlink(NewArchiveEntry.symlink("z/y", ".."));
+
+        a.close();
+
+        final IHDF5ArchiveReader ra = HDF5ArchiverFactory.openForReading(h5arfile);
+
+        // A file is resolved to itself
+        final ArchiveEntry aFileLink = ra.tryGetEntry("aFile", false);
+        assertEquals(aFileLink, ra.tryResolveLink(aFileLink));
+
+        // A directory is resolved to itself
+        final ArchiveEntry aDirLink = ra.tryGetEntry("aDir", false);
+        assertEquals(aDirLink, ra.tryResolveLink(aDirLink));
+
+        // A symlink to a file is correctly resolved...
+        final ArchiveEntry aSymLinkToAFile = ra.tryGetEntry("aLinkToAFile", true);
+        final ArchiveEntry aResolvedLinkToAFile = ra.tryResolveLink(aSymLinkToAFile);
+        assertNotNull(aResolvedLinkToAFile);
+        assertEquals(aFileLink.getPath(), aResolvedLinkToAFile.getPath());
+        // .. even when the link target was not read
+        final ArchiveEntry aSymLinkToAFileWithoutTarget = ra.tryGetEntry("aLinkToAFile", false);
+        final ArchiveEntry aResolvedLinkToAFileWithoutTarget =
+                ra.tryResolveLink(aSymLinkToAFileWithoutTarget);
+        assertNotNull(aResolvedLinkToAFileWithoutTarget);
+        assertEquals(aFileLink.getPath(), aResolvedLinkToAFileWithoutTarget.getPath());
+
+        // A symlink to a dir is correctly resolved as well
+        final ArchiveEntry aSymLinkToADir = ra.tryGetEntry("aLinkToADir", true);
+        final ArchiveEntry aResolvedLinkToADir = ra.tryResolveLink(aSymLinkToADir);
+        assertNotNull(aResolvedLinkToADir);
+        assertEquals(aDirLink.getPath(), aResolvedLinkToADir.getPath());
+
+        // A nonsense link ('/../outOfFS') is resolved to null
+        assertNull(ra.tryResolveLink(ra.tryGetEntry("aNonsenseLink", true)));
+
+        // A link to a non-existing file is resolved to null
+        assertNull(ra.tryResolveLink(ra.tryGetEntry("aLinkToANonexistingFile", true)));
+
+        // A link to a link to a file
+        final ArchiveEntry aSymLinkToALinkToAFile =
+                ra.tryGetEntry("/aDir/aLinkToALinkToAFile", false);
+        final ArchiveEntry aResolvedSymLinkToALinkToAFile =
+                ra.tryResolveLink(aSymLinkToALinkToAFile);
+        assertNotNull(aResolvedSymLinkToALinkToAFile);
+        assertEquals(aFileLink.getPath(), aResolvedSymLinkToALinkToAFile.getPath());
+        final ArchiveEntry aSymLinkToALinkToAFileWithPathInfoKept =
+                ra.tryGetResolvedEntry("/aDir/aLinkToALinkToAFile", true);
+        assertEquals("/aDir", aSymLinkToALinkToAFileWithPathInfoKept.getParentPath());
+        assertEquals("aLinkToALinkToAFile", aSymLinkToALinkToAFileWithPathInfoKept.getName());
+        assertTrue(aSymLinkToALinkToAFileWithPathInfoKept.isRegularFile());
+        assertEquals(ra.tryGetEntry("aFile", false).getSize(),
+                aSymLinkToALinkToAFileWithPathInfoKept.getSize());
+
+        // A link to a link to a dir
+        final ArchiveEntry aSymLinkToALinkToADir =
+                ra.tryGetEntry("/aDir/aLinkToALinkToADir", false);
+        final ArchiveEntry aResolvedSymLinkToALinkToADir = ra.tryResolveLink(aSymLinkToALinkToADir);
+        assertNotNull(aResolvedSymLinkToALinkToADir);
+        assertEquals(aDirLink.getPath(), aResolvedSymLinkToALinkToADir.getPath());
+
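+        // A resolving listing drops the two unresolvable links ("aNonsenseLink" and
+        // "aLinkToANonexistingFile"), leaving 8 of the 10 archived entries.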
+        final List<ArchiveEntry> entries =
+                ra.list("/", ListParameters.build().resolveSymbolicLinks().get());
+        assertEquals(8, entries.size());
+        assertEquals("/aDir", entries.get(0).getPath());
+        assertTrue(entries.get(0).isDirectory());
+        assertEquals("/aDir/aLinkToALinkToADir", entries.get(1).getPath());
+        assertEquals("/aDir", entries.get(1).getRealPath());
+        assertTrue(entries.get(1).isDirectory());
+        assertEquals("/aDir/aLinkToALinkToAFile", entries.get(2).getPath());
+        assertEquals("/aFile", entries.get(2).getRealPath());
+        assertTrue(entries.get(2).isRegularFile());
+        assertEquals("/z", entries.get(3).getPath());
+        assertTrue(entries.get(3).isDirectory());
+        assertEquals("/z/y", entries.get(4).getPath());
+        assertEquals("/", entries.get(4).getRealPath());
+        assertTrue(entries.get(4).isDirectory());
+        assertEquals("/aFile", entries.get(5).getPath());
+        assertTrue(entries.get(5).isRegularFile());
+        assertEquals("/aLinkToADir", entries.get(6).getPath());
+        assertTrue(entries.get(6).isDirectory());
+        assertEquals("/aLinkToAFile", entries.get(7).getPath());
+        assertTrue(entries.get(7).isRegularFile());
+        assertEquals(entries.get(5).getCrc32(), entries.get(7).getCrc32());
+
+        assertEquals("/", ra.tryGetResolvedEntry("z/y", false).getPath());
+
+        ra.close();
+    }
+
+    @Test
+    public void testResolveLinksWithLoops()
+    {
+        workingDirectory.mkdirs();
+        final File h5arfile = new File(workingDirectory, "testResolveLinksWithLoops.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(h5arfile);
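+        // Two symlink loops: a <-> b and c -> d -> e -> c.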
+        a.archiveSymlink(NewArchiveEntry.symlink("a", "b"));
+        a.archiveSymlink(NewArchiveEntry.symlink("b", "a"));
+
+        a.archiveSymlink(NewArchiveEntry.symlink("c", "d"));
+        a.archiveSymlink(NewArchiveEntry.symlink("d", "e"));
+        a.archiveSymlink(NewArchiveEntry.symlink("e", "c"));
+        a.close();
+        final IHDF5ArchiveReader ra = HDF5ArchiverFactory.openForReading(h5arfile);
+        assertNull(ra.tryGetResolvedEntry("a", false));
+        assertNull(ra.tryGetResolvedEntry("d", false));
+        assertTrue(ra.list("/",
+                ListParameters.build().resolveSymbolicLinks().followSymbolicLinks().get())
+                .isEmpty());
+        ra.close();
+    }
+
+    @Test(expectedExceptions = ListArchiveTooManySymbolicLinksException.class)
+    public void testResolveLinksWithLoopsInPath()
+    {
+        workingDirectory.mkdirs();
+        final File h5arfile = new File(workingDirectory, "testResolveLinksWithLoopsInPath.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        final IHDF5Archiver a = HDF5ArchiverFactory.open(h5arfile);
+
+        // A loop in the paths
+        a.archiveDirectory(NewArchiveEntry.directory("1"));
+        a.archiveDirectory(NewArchiveEntry.directory("2"));
+        a.archiveSymlink(NewArchiveEntry.symlink("1/3", "/2"));
+        a.archiveSymlink(NewArchiveEntry.symlink("2/4", "/1"));
+        a.close();
+        final IHDF5ArchiveReader ra = HDF5ArchiverFactory.openForReading(h5arfile);
+        try
+        {
+            // Will throw ListArchiveTooManySymbolicLinksException
+            ra.list("/", ListParameters.build().resolveSymbolicLinks().followSymbolicLinks().get());
+        } finally
+        {
+            ra.close();
+        }
+    }
+
+    @Test
+    public void testIntegrityCheck() throws IOException
+    {
+        final File dir = createTestDirectory();
+        final File h5arfile = new File(workingDirectory, "testIntegrityCheck.h5ar");
+        h5arfile.delete();
+        h5arfile.deleteOnExit();
+        HDF5ArchiverFactory.open(h5arfile).archiveFromFilesystem("ttt", dir).close();
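+        // Tamper with two archived files behind the archiver's back: the first write
+        // changes size and content, the second changes content but keeps the size.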
+        IHDF5Writer w = HDF5Factory.open(h5arfile);
+        w.string().write("/ttt/test/file_test1.txt", "changed behind the back.");
+        w.int8().writeArray("/ttt/test/dir_somedir/file_test2.txt", "A\nB\nD\n".getBytes());
+        w.close();
+        final IHDF5ArchiveReader ar = HDF5ArchiverFactory.openForReading(h5arfile);
+        final List<ArchiveEntry> failed = ar.test();
+        ar.close();
+        assertEquals(2, failed.size());
+        assertEquals("/ttt/test/dir_somedir/file_test2.txt", failed.get(0).getPath());
+        assertFalse(failed.get(0).isOK());
+        assertTrue(failed.get(0).sizeOK());
+        assertFalse(failed.get(0).checksumOK());
+        assertEquals("/ttt/test/file_test1.txt", failed.get(1).getPath());
+        assertFalse(failed.get(1).isOK());
+        assertFalse(failed.get(1).sizeOK());
+        assertFalse(failed.get(1).checksumOK());
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/UtilsTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/UtilsTest.java
new file mode 100644
index 0000000..df2508b
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/UtilsTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2012 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.h5ar;
+
+import static org.testng.AssertJUnit.assertEquals;
+
+import org.testng.annotations.Test;
+
+/**
+ * Test cases for {@link Utils}.
+ *
+ * @author Bernd Rinn
+ */
+public class UtilsTest
+{
+    @Test
+    public void testNormalizePath()
+    {
+        assertEquals("/", Utils.normalizePath("/"));
+        assertEquals("/a", Utils.normalizePath("a"));
+        assertEquals("/a", Utils.normalizePath("a/"));
+        assertEquals("/a", Utils.normalizePath("/a/"));
+        assertEquals("/a/b/c", Utils.normalizePath("a/b/c"));
+        assertEquals("/a/c", Utils.normalizePath("a/b/../c/./"));
+    }
+    
+    @Test
+    public void testGetParentPath()
+    {
+        assertEquals("", Utils.getParentPath("/"));
+        assertEquals("/", Utils.getParentPath("/dir"));
+        assertEquals("/some", Utils.getParentPath("/some/dir"));
+    }
+    
+    @Test
+    public void testConcatLink()
+    {
+        assertEquals("/", Utils.concatLink(Utils.getParentPath("/"), Utils.getName("/")));
+        assertEquals("/a", Utils.concatLink(Utils.getParentPath("/a"), Utils.getName("/a")));
+        assertEquals("/a/b", Utils.concatLink(Utils.getParentPath("/a/b"), Utils.getName("/a/b")));
+    }
+    
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/test.h5ar b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/test.h5ar
new file mode 100644
index 0000000..009b296
Binary files /dev/null and b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/test.h5ar differ
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/test_14_12_0.h5ar b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/test_14_12_0.h5ar
new file mode 100644
index 0000000..dc70bc3
Binary files /dev/null and b/sourceTest/java/ch/systemsx/cisd/hdf5/h5ar/test_14_12_0.h5ar differ
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/io/HDF5DataSetRandomAccessFileTest.java b/sourceTest/java/ch/systemsx/cisd/hdf5/io/HDF5DataSetRandomAccessFileTest.java
new file mode 100644
index 0000000..0415bf2
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/io/HDF5DataSetRandomAccessFileTest.java
@@ -0,0 +1,1035 @@
+/*
+ * Copyright 2011 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.io;
+
+import static ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory.asInputStream;
+import static ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory.asOutputStream;
+import static ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory.asRandomAccessFile;
+import static ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory.asRandomAccessFileReadOnly;
+import static ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory.asRandomAccessFileReadWrite;
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertFalse;
+import static org.testng.AssertJUnit.assertNotNull;
+import static org.testng.AssertJUnit.assertNull;
+import static org.testng.AssertJUnit.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.ArrayUtils;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.convert.NativeData;
+import ch.systemsx.cisd.base.convert.NativeData.ByteOrder;
+import ch.systemsx.cisd.base.io.AdapterIInputStreamToInputStream;
+import ch.systemsx.cisd.hdf5.HDF5DataSetInformation;
+import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+import ch.systemsx.cisd.hdf5.HDF5StorageLayout;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+
+/**
+ * Test cases for {@link HDF5DataSetRandomAccessFile}.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5DataSetRandomAccessFileTest
+{
+
+    private static final File rootDirectory = new File("targets", "unit-test-wd");
+
+    private static final File workingDirectory = new File(rootDirectory,
+            "hdf5-dataset-random-access-file-wd");
+
+    @BeforeSuite
+    public void init()
+    {
+        workingDirectory.mkdirs();
+        assertTrue(workingDirectory.isDirectory());
+        workingDirectory.deleteOnExit();
+        rootDirectory.deleteOnExit();
+    }
+
+    @Override
+    protected void finalize() throws Throwable
+    {
+        // Delete the working directory
+        if (workingDirectory.exists() && workingDirectory.canWrite())
+        {
+            workingDirectory.delete();
+        }
+        // Delete root directory
+        if (rootDirectory.exists() && rootDirectory.canWrite())
+        {
+            rootDirectory.delete();
+        }
+
+        super.finalize();
+    }
+
+    @Test
+    public void testWriterOpenAfterRAFileClosed()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReaderOpenAfterRAFileClosed.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().open(dataSetFile);
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFile(writer, dataSetName);
+        raFile.close();
+        // Checks that the writer is still open after the random access file was closed
+        writer.exists("/");
+        writer.close();
+    }
+    
+    @Test
+    public void testReadContiguousByteByByte()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadContiguousByteByByte.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).dontUseExtendableDataTypes()
+                        .writer();
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
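+        // Read byte by byte until read() signals EOF; available() must track the
+        // number of bytes remaining.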
+        int b;
+        int idx = 0;
+        while ((b = raFile.read()) >= 0)
+        {
+            arrayRead[idx++] = (byte) b;
+            assertEquals(referenceArray.length - idx, raFile.available());
+        }
+        assertEquals(referenceArray.length, idx);
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedByteByByte()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedByteByByte.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int b;
+        int idx = 0;
+        while ((b = raFile.read()) >= 0)
+        {
+            arrayRead[idx++] = (byte) b;
+            assertEquals(referenceArray.length - idx, raFile.available());
+        }
+        assertEquals(referenceArray.length, idx);
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadContiguousBlockwise()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadContiguousBlockwise.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).dontUseExtendableDataTypes()
+                        .writer();
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int idx = 0;
+        int bsize = referenceArray.length / 10;
+        int bytesRead;
+        while ((bytesRead = raFile.read(arrayRead, idx, bsize)) >= 0)
+        {
+            idx += bytesRead;
+            assertEquals(referenceArray.length - idx, raFile.available());
+        }
+        assertEquals(referenceArray.length, idx);
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedBlockwiseMatch()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedBlockwiseMatch.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int idx = 0;
+        int bsize = chunkSize;
+        int bytesRead;
+        while ((bytesRead = raFile.read(arrayRead, idx, bsize)) >= 0)
+        {
+            idx += bytesRead;
+            assertEquals(referenceArray.length - idx, raFile.available());
+        }
+        assertEquals(referenceArray.length, idx);
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedBlockwiseMismatch()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedBlockwiseMismatch.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int idx = 0;
+        int bsize = chunkSize + 1;
+        int bytesRead;
+        while ((bytesRead = raFile.read(arrayRead, idx, bsize)) >= 0)
+        {
+            idx += bytesRead;
+            assertEquals(referenceArray.length - idx, raFile.available());
+        }
+        assertEquals(referenceArray.length, idx);
+        for (int i = 0; i < idx; ++i)
+        {
+            if (referenceArray[i] != arrayRead[i])
+            {
+                System.err.println("Mismatch " + i + ": " + referenceArray[i] + ":" + arrayRead[i]);
+                break;
+            }
+        }
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testSkip()
+    {
+        final File dataSetFile = new File(workingDirectory, "testSkip.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
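+        // skip() must advance the read position without delivering bytes, shrinking
+        // available() accordingly, also across chunk boundaries.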
+        int idx = 0;
+        int bsize = chunkSize + 1;
+        int bytesRead = raFile.read(arrayRead, idx, bsize);
+        assertEquals(bsize, bytesRead);
+        final int skipSize = 73;
+        assertEquals(referenceArray.length - bsize, raFile.available());
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(referenceArray.length - bsize - skipSize, raFile.available());
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(referenceArray.length - bsize - 2 * skipSize, raFile.available());
+        assertEquals(referenceArray[bsize + 2 * skipSize], (byte) raFile.read());
+        raFile.close();
+    }
+
+    @Test
+    public void testMarkSupport()
+    {
+        final File dataSetFile = new File(workingDirectory, "testMarkSupport.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[1000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final byte[] arrayRead = new byte[referenceArray.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int idx = 0;
+        int bsize = chunkSize + 1;
+        int bytesRead = raFile.read(arrayRead, idx, bsize);
+        assertEquals(bsize, bytesRead);
+        assertTrue(raFile.markSupported());
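+        // mark() remembers the current file pointer; reset() must return to it, also
+        // repeatedly and after a later re-mark().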
+        long markedFilePointer = raFile.getFilePointer();
+        raFile.mark(0);
+        assertEquals(markedFilePointer, raFile.getFilePointer());
+        byte markedByte = (byte) raFile.read();
+        assertEquals(markedFilePointer + 1, raFile.getFilePointer());
+        final int skipSize = 73;
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(markedFilePointer + 1 + skipSize, raFile.getFilePointer());
+        raFile.reset();
+        assertEquals(markedFilePointer, raFile.getFilePointer());
+        assertEquals(markedByte, (byte) raFile.read());
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(markedFilePointer + 1 + 2 * skipSize, raFile.getFilePointer());
+        raFile.reset();
+        assertEquals(markedFilePointer, raFile.getFilePointer());
+        assertEquals(markedByte, (byte) raFile.read());
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(skipSize, raFile.skip(skipSize));
+        markedFilePointer = raFile.getFilePointer();
+        raFile.mark(0);
+        assertEquals(markedFilePointer, raFile.getFilePointer());
+        markedByte = (byte) raFile.read();
+        assertEquals(markedFilePointer + 1, raFile.getFilePointer());
+        assertEquals(skipSize, raFile.skip(skipSize));
+        assertEquals(markedFilePointer + 1 + skipSize, raFile.getFilePointer());
+        raFile.reset();
+        assertEquals(markedFilePointer, raFile.getFilePointer());
+        assertEquals(markedByte, (byte) raFile.read());
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteTwiceSmallBuffer() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteTwiceSmallBuffer.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[10];
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFile(writer, dataSetName, HDF5GenericStorageFeatures.GENERIC_CHUNKED,
+                        11, null);
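+        // The size argument of 11 presumably acts as a small internal buffer/chunk
+        // size (cf. the test name), so the two 10-byte writes below have to span it.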
+        raFile.write(referenceArray);
+        raFile.write(referenceArray);
+        raFile.flush();
+        final HDF5DataSetInformation dsInfo = writer.getDataSetInformation(dataSetName);
+        assertEquals(HDF5StorageLayout.CHUNKED, dsInfo.getStorageLayout());
+        assertEquals(referenceArray.length * 2, dsInfo.getSize());
+        assertNull(dsInfo.getTypeInformation().tryGetOpaqueTag());
+        final byte[] arrayRead = writer.int8().readArray(dataSetName);
+        assertEquals(referenceArray.length * 2, arrayRead.length);
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            assertEquals(Integer.toString(i), referenceArray[i], arrayRead[i]);
+        }
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            assertEquals(Integer.toString(i), referenceArray[i], arrayRead[referenceArray.length + i]);
+        }
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteTwiceLargeBuffer() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteTwiceLargeBuffer.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[10];
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFile(writer, dataSetName, HDF5GenericStorageFeatures.GENERIC_CHUNKED,
+                        100, null);
+        raFile.write(referenceArray);
+        raFile.write(referenceArray);
+        raFile.flush();
+        final HDF5DataSetInformation dsInfo = writer.getDataSetInformation(dataSetName);
+        assertEquals(HDF5StorageLayout.CHUNKED, dsInfo.getStorageLayout());
+        assertEquals(referenceArray.length * 2, dsInfo.getSize());
+        assertNull(dsInfo.getTypeInformation().tryGetOpaqueTag());
+        final byte[] arrayRead = writer.int8().readArray(dataSetName);
+        assertEquals(referenceArray.length * 2, arrayRead.length);
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            assertEquals(Integer.toString(i), referenceArray[i], arrayRead[i]);
+        }
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            assertEquals(Integer.toString(i), referenceArray[i], arrayRead[referenceArray.length + i]);
+        }
+        raFile.close();
+    }
+
+    @Test
+    public void testCopyIOUtils() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testCopyIOUtils.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[10000];
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        final OutputStream ostream =
+                asOutputStream(writer, dataSetName, HDF5GenericStorageFeatures.GENERIC_CHUNKED,
+                        12000, null);
+        IOUtils.copyLarge(new ByteArrayInputStream(referenceArray), ostream);
+        ostream.flush();
+        final HDF5DataSetInformation dsInfo = writer.getDataSetInformation(dataSetName);
+        assertEquals(HDF5StorageLayout.CHUNKED, dsInfo.getStorageLayout());
+        assertEquals(referenceArray.length, dsInfo.getSize());
+        writer.close();
+        final InputStream istream = asInputStream(dataSetFile, dataSetName);
+        final byte[] arrayRead = IOUtils.toByteArray(istream);
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        istream.close();
+    }
+
+    @Test
+    public void testSeek() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testSeek.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[10000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        byte[] arrayRead = IOUtils.toByteArray(new AdapterIInputStreamToInputStream(raFile));
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.seek(0);
+        arrayRead = IOUtils.toByteArray(new AdapterIInputStreamToInputStream(raFile));
+        assertTrue(ArrayUtils.isEquals(referenceArray, arrayRead));
+        raFile.seek(1113);
+        assertEquals(referenceArray[1113], (byte) raFile.read());
+        raFile.close();
+    }
+
+    @Test
+    public void testLength() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testLength.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final byte[] referenceArray = new byte[10000];
+        final int chunkSize = referenceArray.length / 10;
+        for (int i = 0; i < referenceArray.length; ++i)
+        {
+            referenceArray[i] = (byte) i;
+        }
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArray.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArray);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        assertEquals(referenceArray.length, raFile.length());
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedShortBigEndian()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedShortBigEndian.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final short[] referenceArrayShort = new short[50000];
+        for (int i = 0; i < referenceArrayShort.length; ++i)
+        {
+            referenceArrayShort[i] = (short) i;
+        }
+        final byte[] referenceArrayByte =
+                NativeData.shortToByte(referenceArrayShort, ByteOrder.BIG_ENDIAN);
+        final int chunkSize = referenceArrayByte.length / 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArrayByte.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArrayByte);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final short[] arrayRead = new short[referenceArrayShort.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int idx = 0;
+        while (raFile.available() >= 2)
+        {
+            arrayRead[idx++] = raFile.readShort();
+            assertEquals(referenceArrayByte.length - idx * 2, raFile.available());
+        }
+        assertEquals(referenceArrayByte.length, idx * 2);
+        assertTrue(ArrayUtils.isEquals(referenceArrayShort, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedShortLittleEndian()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedShortLittleEndian.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final short[] referenceArrayShort = new short[50000];
+        for (int i = 0; i < referenceArrayShort.length; ++i)
+        {
+            referenceArrayShort[i] = (short) i;
+        }
+        final byte[] referenceArrayByte =
+                NativeData.shortToByte(referenceArrayShort, ByteOrder.LITTLE_ENDIAN);
+        final int chunkSize = referenceArrayByte.length / 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArrayByte.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArrayByte);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final short[] arrayRead = new short[referenceArrayShort.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        raFile.setByteOrder(java.nio.ByteOrder.LITTLE_ENDIAN);
+        int idx = 0;
+        while (raFile.available() >= 2)
+        {
+            arrayRead[idx++] = raFile.readShort();
+            assertEquals(referenceArrayByte.length - idx * 2, raFile.available());
+        }
+        assertEquals(referenceArrayByte.length, idx * 2);
+        assertTrue(ArrayUtils.isEquals(referenceArrayShort, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedDoubleBigEndian()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedDoubleBigEndian.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final double[] referenceArrayDouble = new double[12500];
+        for (int i = 0; i < referenceArrayDouble.length; ++i)
+        {
+            referenceArrayDouble[i] = i;
+        }
+        final byte[] referenceArrayByte =
+                NativeData.doubleToByte(referenceArrayDouble, ByteOrder.BIG_ENDIAN);
+        final int chunkSize = referenceArrayByte.length / 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArrayByte.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArrayByte);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final double[] arrayRead = new double[referenceArrayDouble.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        int idx = 0;
+        // readDouble() consumes 8 bytes at a time.
+        while (raFile.available() >= 8)
+        {
+            arrayRead[idx++] = raFile.readDouble();
+            assertEquals(referenceArrayByte.length - idx * 8, raFile.available());
+        }
+        assertEquals(referenceArrayByte.length, idx * 8);
+        assertTrue(ArrayUtils.isEquals(referenceArrayDouble, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedDoubleLittleEndian()
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedDoubleLittleEndian.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final double[] referenceArrayDouble = new double[12500];
+        for (int i = 0; i < referenceArrayDouble.length; ++i)
+        {
+            referenceArrayDouble[i] = i;
+        }
+        final byte[] referenceArrayByte =
+                NativeData.doubleToByte(referenceArrayDouble, ByteOrder.LITTLE_ENDIAN);
+        final int chunkSize = referenceArrayByte.length / 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, referenceArrayByte.length, chunkSize);
+        writer.int8().writeArray(dataSetName, referenceArrayByte);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final double[] arrayRead = new double[referenceArrayDouble.length];
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        raFile.setByteOrder(java.nio.ByteOrder.LITTLE_ENDIAN);
+        int idx = 0;
+        // readDouble() consumes 8 bytes at a time.
+        while (raFile.available() >= 8)
+        {
+            arrayRead[idx++] = raFile.readDouble();
+            assertEquals(referenceArrayByte.length - idx * 8, raFile.available());
+        }
+        assertEquals(referenceArrayByte.length, idx * 8);
+        assertTrue(ArrayUtils.isEquals(referenceArrayDouble, arrayRead));
+        raFile.close();
+    }
+
+    @Test
+    public void testReadChunkedStringReadline() throws UnsupportedEncodingException
+    {
+        final File dataSetFile = new File(workingDirectory, "testReadChunkedStringReadline.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final String reference = "One\nTwo\nThree\r\nFour";
+        final byte[] bytesReference = reference.getBytes("ASCII");
+        final int chunkSize = 4;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, bytesReference.length, chunkSize);
+        writer.int8().writeArray(dataSetName, bytesReference);
+        assertEquals(HDF5StorageLayout.CHUNKED, writer.getDataSetInformation(dataSetName)
+                .getStorageLayout());
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadOnly(dataSetFile, dataSetName);
+        assertEquals("One", raFile.readLine());
+        assertEquals("Two", raFile.readLine());
+        assertEquals("Three", raFile.readLine());
+        assertEquals("Four", raFile.readLine());
+        assertEquals(0, raFile.available());
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteByteByByte() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteByteByByte.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final int chunkSize = 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, 0, chunkSize);
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
+        raFile.mark(0);
+        for (int i = 0; i < 256; ++i)
+        {
+            raFile.write(i);
+        }
+        raFile.reset();
+        final byte[] arrayRead = IOUtils.toByteArray(new AdapterIInputStreamToInputStream(raFile));
+        assertEquals(256, arrayRead.length);
+        for (int i = 0; i < 256; ++i)
+        {
+            assertEquals(Integer.toString(i), (byte) i, arrayRead[i]);
+        }
+
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteByteBlock() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteByteBlock.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final int chunkSize = 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, 0, chunkSize);
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
+        raFile.mark(0);
+        final byte[] arr = new byte[256];
+        for (int i = 0; i < 256; ++i)
+        {
+            arr[i] = (byte) i;
+        }
+        raFile.write(arr);
+        raFile.reset();
+        final byte[] arrayRead = IOUtils.toByteArray(new AdapterIInputStreamToInputStream(raFile));
+        assertEquals(256, arrayRead.length);
+        for (int i = 0; i < 256; ++i)
+        {
+            assertEquals(Integer.toString(i), (byte) i, arrayRead[i]);
+        }
+
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteDouble() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteDouble.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final int chunkSize = 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, 0, chunkSize);
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
+        raFile.mark(0);
+        for (int i = 0; i < 256; ++i)
+        {
+            raFile.writeDouble(i);
+        }
+        raFile.reset();
+        for (int i = 0; i < 256; ++i)
+        {
+            assertEquals(Integer.toString(i), (double) i, raFile.readDouble());
+        }
+        assertEquals(0, raFile.available());
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteBytesOfString() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteBytesOfString.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final int chunkSize = 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, 0, chunkSize);
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
+        raFile.mark(0);
+        raFile.writeBytes("TestString\n");
+        raFile.reset();
+        assertEquals("TestString", raFile.readLine());
+        raFile.close();
+    }
+
+    @Test
+    public void testWriteStringUTF8() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testWriteStringUTF8.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final int chunkSize = 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, 0, chunkSize);
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
+        raFile.mark(0);
+        raFile.writeUTF("TestString\u1873");
+        raFile.reset();
+        assertEquals("TestString\u1873", raFile.readUTF());
+        raFile.close();
+    }
+
+    @Test
+    public void testPendingExtension() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testPendingExtension.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+
+        final int chunkSize = 10;
+        final IHDF5Writer writer =
+                HDF5FactoryProvider.get().configure(dataSetFile).keepDataSetsIfTheyExist().writer();
+        writer.int8().createArray(dataSetName, 0, chunkSize);
+        writer.close();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
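+        // Writing a single byte at offset 20 extends the data set to length 21; the
+        // skipped-over gap is expected to be zero-filled.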
+        raFile.seek(20);
+        raFile.write(42);
+        assertEquals(21, raFile.length());
+        raFile.seek(0);
+        final byte[] arrayRead = IOUtils.toByteArray(new AdapterIInputStreamToInputStream(raFile));
+        assertEquals(42, arrayRead[20]);
+        for (int i = 0; i < 20; ++i)
+        {
+            assertEquals("Position " + i, 0, arrayRead[0]);
+        }
+        raFile.close();
+    }
+
+    @Test
+    public void testEmptyDatasetDefaultParameters() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testEmptyDatasetDefaultParameters.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFileReadWrite(dataSetFile, dataSetName);
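+        // A seek() without any write must not store data: closing should still leave
+        // an empty data set created with the default parameters checked below.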
+        raFile.seek(20);
+        raFile.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(dataSetFile);
+        assertTrue(reader.exists(dataSetName));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(dataSetName);
+        assertEquals(0, info.getSize());
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        assertEquals("FILE", info.getTypeInformation().tryGetOpaqueTag());
+        final int[] chunkSizesOrNull = info.tryGetChunkSizes();
+        assertNotNull(chunkSizesOrNull);
+        assertEquals(1, chunkSizesOrNull.length);
+        assertEquals(1024 * 1024, chunkSizesOrNull[0]);
+        reader.close();
+    }
+
+    @Test
+    public void testEmptyDatasetOpaqueSmallChunkSize() throws IOException
+    {
+        final File dataSetFile =
+                new File(workingDirectory, "testEmptyDatasetOpaqueSmallChunkSize.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+        final int chunkSize = 10 * 1024;
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFile(dataSetFile, dataSetName,
+                        HDF5GenericStorageFeatures.GENERIC_CHUNKED, chunkSize, "FILE");
+        raFile.seek(20);
+        raFile.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(dataSetFile);
+        assertTrue(reader.exists(dataSetName));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(dataSetName);
+        assertEquals(0, info.getSize());
+        assertEquals(HDF5StorageLayout.CHUNKED, info.getStorageLayout());
+        assertEquals("FILE", info.getTypeInformation().tryGetOpaqueTag());
+        final int[] chunkSizesOrNull = info.tryGetChunkSizes();
+        assertNotNull(chunkSizesOrNull);
+        assertEquals(1, chunkSizesOrNull.length);
+        assertEquals(chunkSize, chunkSizesOrNull[0]);
+        reader.close();
+    }
+
+    @Test
+    public void testEmptyDatasetContiguous() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testEmptyDatasetContiguous.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFile(dataSetFile, dataSetName,
+                        HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS, 1024, null);
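+        // Contiguous storage cannot grow, so the data set is created with its full
+        // size (1024) up front even though nothing is written to it.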
+        raFile.seek(20);
+        raFile.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(dataSetFile);
+        assertTrue(reader.exists(dataSetName));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(dataSetName);
+        assertEquals(1024, info.getSize());
+        assertEquals(HDF5StorageLayout.CONTIGUOUS, info.getStorageLayout());
+        assertNull(info.getTypeInformation().tryGetOpaqueTag());
+        final int[] chunkSizesOrNull = info.tryGetChunkSizes();
+        assertNull(chunkSizesOrNull);
+        reader.close();
+    }
+
+    @Test
+    public void testEmptyDatasetCompact() throws IOException
+    {
+        final File dataSetFile = new File(workingDirectory, "testEmptyDatasetCompact.h5");
+        final String dataSetName = "ds";
+        dataSetFile.delete();
+        assertFalse(dataSetFile.exists());
+        dataSetFile.deleteOnExit();
+        final HDF5DataSetRandomAccessFile raFile =
+                asRandomAccessFile(dataSetFile, dataSetName,
+                        HDF5GenericStorageFeatures.GENERIC_COMPACT, 1024, null);
+        raFile.seek(20);
+        raFile.close();
+        final IHDF5Reader reader = HDF5FactoryProvider.get().openForReading(dataSetFile);
+        assertTrue(reader.exists(dataSetName));
+        final HDF5DataSetInformation info = reader.getDataSetInformation(dataSetName);
+        assertEquals(1024, info.getSize());
+        assertEquals(HDF5StorageLayout.COMPACT, info.getStorageLayout());
+        assertNull(info.getTypeInformation().tryGetOpaqueTag());
+        final int[] chunkSizesOrNull = info.tryGetChunkSizes();
+        assertNull(chunkSizesOrNull);
+        reader.close();
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5CodeGenerator.java b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5CodeGenerator.java
new file mode 100644
index 0000000..aca7f69
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5CodeGenerator.java
@@ -0,0 +1,297 @@
+/*
+ * Copyright 2008 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5.tools;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ * A code generator for the identical parts of the HDF5 Java classes for the different numerical
+ * types.
+ * 
+ * @author Bernd Rinn
+ */
+public class HDF5CodeGenerator
+{
+
+    static class TemplateParameters
+    {
+        final String name;
+
+        final String capitalizedName;
+
+        final String capitalizedClassName;
+
+        final String upperCaseClassName;
+
+        final String wrapperName;
+
+        final String storageType;
+
+        final String featureBasedStorageType;
+
+        final String storageTypeImport;
+
+        final String memoryType;
+
+        final String elementSize;
+
+        final boolean isUnsigned;
+
+        final boolean isInteger;
+
+        TemplateParameters(String name, String capitalizedName, String capitalizedClassName,
+                String wrapperName, String storageType, String featureBasedStorageType,
+                String storageTypeImport, String memoryType, String elementSize,
+                boolean isUnsigned, boolean isInteger)
+        {
+            this.name = name;
+            this.capitalizedName = capitalizedName;
+            this.capitalizedClassName = capitalizedClassName;
+            this.upperCaseClassName = capitalizedClassName.toUpperCase();
+            this.wrapperName = wrapperName;
+            this.storageType = storageType;
+            this.featureBasedStorageType = featureBasedStorageType;
+            this.storageTypeImport = storageTypeImport;
+            this.memoryType = memoryType;
+            this.elementSize = elementSize;
+            this.isUnsigned = isUnsigned;
+            this.isInteger = isInteger;
+        }
+
+    }
+
+    static TemplateParameters tp(String name, String capitalizedName, String capitalizedClassName,
+            String wrapperName, String storageType, String featureBasedStorageType,
+            String storageTypeImport, String memoryType, String elementSize, boolean isUnsigned,
+            boolean isInteger)
+    {
+        return new TemplateParameters(name, capitalizedName, capitalizedClassName, wrapperName,
+                storageType, featureBasedStorageType, storageTypeImport, memoryType, elementSize,
+                isUnsigned, isInteger);
+    }
+
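+    // generateCode() performs verbatim string substitution: each placeholder below is
+    // replaced by the corresponding field of a TemplateParameters instance, e.g. for
+    // the signed byte entry "__name__" -> "byte" and "__Memorytype__" -> "H5T_NATIVE_INT8".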
+    static final TemplateParameters PLACEHOLDERS = tp("__name__", "__Name__", "__Classname__",
+            "__Wrappername__", "__Storagetype__", "__FeatureBasedStoragetype__",
+            "__StoragetypeImport__", "__Memorytype__", "__elementsize__", false, false);
+
+    static final TemplateParameters[] NUMERICAL_TYPES =
+            new TemplateParameters[]
+                {
+                        tp("byte",
+                                "Byte",
+                                "Int",
+                                "Byte",
+                                "H5T_STD_I8LE",
+                                "features.isSigned() ? H5T_STD_I8LE : H5T_STD_U8LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I8LE;\n"
+                                        + "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U8LE;",
+                                "H5T_NATIVE_INT8", "1", false, true),
+                        tp("byte",
+                                "Byte",
+                                "Int",
+                                "Byte",
+                                "H5T_STD_U8LE",
+                                "H5T_STD_U8LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U8LE;",
+                                "H5T_NATIVE_UINT8", "1", true, true),
+                        tp("short",
+                                "Short",
+                                "Int",
+                                "Short",
+                                "H5T_STD_I16LE",
+                                "features.isSigned() ? H5T_STD_I16LE : H5T_STD_U16LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I16LE;\n"
+                                        + "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U16LE;",
+                                "H5T_NATIVE_INT16", "2", false, true),
+                        tp("short",
+                                "Short",
+                                "Int",
+                                "Short",
+                                "H5T_STD_U16LE",
+                                "H5T_STD_U16LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U16LE;",
+                                "H5T_NATIVE_UINT16", "2", true, true),
+                        tp("int",
+                                "Int",
+                                "Int",
+                                "Integer",
+                                "H5T_STD_I32LE",
+                                "features.isSigned() ? H5T_STD_I32LE : H5T_STD_U32LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I32LE;\n"
+                                        + "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U32LE;",
+                                "H5T_NATIVE_INT32", "4", false, true),
+                        tp("int",
+                                "Int",
+                                "Int",
+                                "Integer",
+                                "H5T_STD_U32LE",
+                                "H5T_STD_U32LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U32LE;",
+                                "H5T_NATIVE_UINT32", "4", true, true),
+                        tp("long",
+                                "Long",
+                                "Int",
+                                "Long",
+                                "H5T_STD_I64LE",
+                                "features.isSigned() ? H5T_STD_I64LE : H5T_STD_U64LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_I64LE;\n"
+                                        + "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;",
+                                "H5T_NATIVE_INT64", "8", false, true),
+                        tp("long",
+                                "Long",
+                                "Int",
+                                "Long",
+                                "H5T_STD_U64LE",
+                                "H5T_STD_U64LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_STD_U64LE;",
+                                "H5T_NATIVE_UINT64", "8", true, true),
+                        tp("float",
+                                "Float",
+                                "Float",
+                                "Float",
+                                "H5T_IEEE_F32LE",
+                                "H5T_IEEE_F32LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F32LE;",
+                                "H5T_NATIVE_FLOAT", "4", false, false),
+                        tp("double",
+                                "Double",
+                                "Float",
+                                "Double",
+                                "H5T_IEEE_F64LE",
+                                "H5T_IEEE_F64LE",
+                                "import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_IEEE_F64LE;",
+                                "H5T_NATIVE_DOUBLE", "8", false, false) };
+
+    /**
+     * Generate the code for all numerical types from <var>codeTemplate</var> and write it to
+     * <code>stdout</code>.
+     */
+    static void generateCode(final String codeTemplate)
+    {
+        for (TemplateParameters t : NUMERICAL_TYPES)
+        {
+            generateCode(codeTemplate, t, System.out);
+        }
+    }
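+
+    // Usage sketch (illustrative only, not called by the build): regenerate the
+    // reader code for every numerical type on stdout from the reader template:
+    //
+    //   generateCode(FileUtils.readFileToString(new File(
+    //           "sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveReader.java.templ")));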
+
+    /**
+     * Generate the code for the numerical type described by <var>params</var> from
+     * <var>codeTemplate</var> and write it to <code>out</code>.
+     */
+    static void generateCode(final String codeTemplate, final TemplateParameters params,
+            final PrintStream out)
+    {
+        String s = codeTemplate;
+        if (params.isInteger)
+        {
+            s = StringUtils.replace(s, "__SuperInterface__", "IHDF5Unsigned__Name__Writer");
+            s = StringUtils.replace(s, "__OverrideIfInt__", "@Override\n    ");
+            s =
+                    StringUtils
+                            .replace(
+                                    s,
+                                    "__SupressWarning__ ",
+                                    " // Note: It is a trick for keeping backward compatibility to let this interface extend \n"
+                                            + " // IHDF5Unsigned__Name__Writer instead of IHDF5__Name__Reader as it logically should.\n"
+                                            + " // Once we remove IHDF5Unsigned__Name__Writer, uncomment the following line and remove\n"
+                                            + " // all @Override annotations and we are fine again.\n"
+                                            + "//public interface IHDF5__Name__Writer extends IHDF5__Name__Reader\n"
+                                            + "@SuppressWarnings(\"deprecation\")\n");
+            s =
+                    StringUtils
+                            .replace(
+                                    s,
+                                    "__NoteUnsigned__",
+                                    " * <p>   \n"
+                                            + " * <i>Note:</i> If you need to convert from and to unsigned values, use the methods of \n"
+                                            + " * {@link UnsignedIntUtils}.\n");
+        } else
+        {
+            s = StringUtils.replace(s, "__SuperInterface__", "IHDF5__Name__Reader");
+            s = StringUtils.replace(s, "__OverrideIfInt__", "");
+            s = StringUtils.replace(s, "__SupressWarning__ ", "");
+            s = StringUtils.replace(s, "__NoteUnsigned__", "");
+        }
+        s = StringUtils.replace(s, PLACEHOLDERS.name, params.name);
+        s = StringUtils.replace(s, PLACEHOLDERS.capitalizedName, params.capitalizedName);
+        s = StringUtils.replace(s, PLACEHOLDERS.capitalizedClassName, params.capitalizedClassName);
+        s = StringUtils.replace(s, PLACEHOLDERS.upperCaseClassName, params.upperCaseClassName);
+        s = StringUtils.replace(s, PLACEHOLDERS.wrapperName, params.wrapperName);
+        s = StringUtils.replace(s, PLACEHOLDERS.storageType, params.storageType);
+        s =
+                StringUtils.replace(s, PLACEHOLDERS.featureBasedStorageType,
+                        params.featureBasedStorageType);
+        s = StringUtils.replace(s, PLACEHOLDERS.storageTypeImport, params.storageTypeImport);
+        s = StringUtils.replace(s, PLACEHOLDERS.memoryType, params.memoryType);
+        s = StringUtils.replace(s, PLACEHOLDERS.elementSize, params.elementSize);
+        out.println(s);
+    }
+
+    public static void main(String[] args) throws IOException
+    {
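+        // For each numerical type, four files are regenerated under
+        // source/java/ch/systemsx/cisd/hdf5: the reader interface (for signed types
+        // only), the reader class, the writer interface and the writer class (the
+        // latter three carrying an "Unsigned" name part for the unsigned variants).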
+        for (TemplateParameters t : NUMERICAL_TYPES)
+        {
+            if (t.isUnsigned == false)
+            {
+                final String interfaceTemplateReader =
+                        FileUtils
+                                .readFileToString(new File(
+                                        "sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveReader.java.templ"));
+                final PrintStream outInterfaceReader =
+                        new PrintStream(new File("source/java/ch/systemsx/cisd/hdf5/IHDF5"
+                                + t.capitalizedName + "Reader.java"));
+                generateCode(interfaceTemplateReader, t, outInterfaceReader);
+                outInterfaceReader.close();
+            }
+            final String classTemplateReader =
+                    FileUtils
+                            .readFileToString(new File(
+                                    t.isUnsigned ? "sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveReader.java.templ"
+                                            : "sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveReader.java.templ"));
+            final PrintStream outClassReader =
+                    new PrintStream(new File("source/java/ch/systemsx/cisd/hdf5/HDF5"
+                            + (t.isUnsigned ? "Unsigned" : "") + t.capitalizedName + "Reader.java"));
+            generateCode(classTemplateReader, t, outClassReader);
+            outClassReader.close();
+            final String interfaceTemplateWriter =
+                    FileUtils
+                            .readFileToString(new File(
+                                    t.isUnsigned ? "sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5UnsignedPrimitiveWriter.java.templ"
+                                            : "sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveWriter.java.templ"));
+            final PrintStream outInterfaceWriter =
+                    new PrintStream(new File("source/java/ch/systemsx/cisd/hdf5/IHDF5"
+                            + (t.isUnsigned ? "Unsigned" : "") + t.capitalizedName + "Writer.java"));
+            generateCode(interfaceTemplateWriter, t, outInterfaceWriter);
+            outInterfaceWriter.close();
+            final String classTemplateWriter =
+                    FileUtils
+                            .readFileToString(new File(
+                                    t.isUnsigned ? "sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveWriter.java.templ"
+                                            : "sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveWriter.java.templ"));
+            final PrintStream outClassWriter =
+                    new PrintStream(new File("source/java/ch/systemsx/cisd/hdf5/HDF5"
+                            + (t.isUnsigned ? "Unsigned" : "") + t.capitalizedName + "Writer.java"));
+            generateCode(classTemplateWriter, t, outClassWriter);
+            outClassWriter.close();
+        }
+    }
+}
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveReader.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveReader.java.templ
new file mode 100644
index 0000000..dc255ed
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveReader.java.templ
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.__Memorytype__;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
+
+/**
+ * The implementation of {@link IHDF5__Name__Reader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5__Name__Reader implements IHDF5__Name__Reader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5__Name__Reader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For Unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public __name__ getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__Wrappername__> getAttributeRunnable = new ICallableWithCleanUp<__Wrappername__>()
+            {
+                @Override
+                public __Wrappername__ call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final __name__[] data =
+                            baseReader.h5.readAttributeAs__Name__Array(attributeId, __Memorytype__, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public __name__[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__name__[]> getAttributeRunnable =
+                new ICallableWithCleanUp<__name__[]>()
+                    {
+                        @Override
+                        public __name__[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return get__Name__ArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MD__Name__Array getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MD__Name__Array> getAttributeRunnable =
+                new ICallableWithCleanUp<MD__Name__Array>()
+                    {
+                        @Override
+                        public MD__Name__Array call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return get__Name__MDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public __name__[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MD__Name__Array array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public __name__ read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__Wrappername__> readCallable = new ICallableWithCleanUp<__Wrappername__>()
+            {
+                @Override
+                public __Wrappername__ call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final __name__[] data = new __name__[1];
+                    baseReader.h5.readDataSet(dataSetId, __Memorytype__, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public __name__[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__name__[]> readCallable = new ICallableWithCleanUp<__name__[]>()
+            {
+                @Override
+                public __name__[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return read__Name__Array(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private __name__[] read__Name__Array(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final __name__[] data = new __name__[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, __Memorytype__, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return read__Name__ArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private __name__[] read__Name__ArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final __name__[] data = new __name__[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(__Memorytype__, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MD__Name__Array array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, __Memorytype__, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId,
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MD__Name__Array array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, __Memorytype__, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public __name__[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public __name__[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__name__[]> readCallable = new ICallableWithCleanUp<__name__[]>()
+            {
+                @Override
+                public __name__[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final __name__[] data = new __name__[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, __Memorytype__, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public __name__[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MD__Name__Array array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public __name__[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MD__Name__Array array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public __name__[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MD__Name__Array array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MD__Name__Array readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MD__Name__Array readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MD__Name__Array readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MD__Name__Array> readCallable = new ICallableWithCleanUp<MD__Name__Array>()
+            {
+                @Override
+                public MD__Name__Array call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return read__Name__MDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MD__Name__Array read__Name__MDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final __name__[] data = new __name__[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, __Memorytype__, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MD__Name__Array(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return read__Name__MDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MD__Name__Array read__Name__MDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(__Memorytype__, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final __name__[] data = new __name__[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MD__Name__Array(data, arrayDimensions);
+        } else
+        {
+            final __name__[] data =
+                    new __name__[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MD__Name__Array(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MD__Name__Array readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MD__Name__Array readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MD__Name__Array> readCallable = new ICallableWithCleanUp<MD__Name__Array>()
+            {
+                @Override
+                public MD__Name__Array call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final __name__[] dataBlock = new __name__[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, __Memorytype__,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MD__Name__Array(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
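+                        // A rank mismatch can mean the data set uses an array element
+                        // type: if the missing ranks equal the rank of the element
+                        // type, delegate to readMDArrayBlockOfArrays.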
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MD__Name__Array readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // We do not support block-wise reading of array types, check
+        // that we do not have to and bail out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final __name__[] dataBlock =
+                new __name__[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(__Memorytype__, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MD__Name__Array(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<__name__[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<__name__[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<__name__[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<__name__[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<__name__[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final __name__[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<__name__[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MD__Name__Array>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MD__Name__Array>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MD__Name__Array>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MD__Name__Array>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MD__Name__Array> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MD__Name__Array data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MD__Name__Array>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    __name__[] get__Name__ArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
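+        // The attribute may be stored with a rank-1 H5T_ARRAY data type or as a plain
+        // attribute with a one-dimensional data space; both variants are read into a
+        // flat __name__[].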
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(__Memorytype__, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = __Memorytype__;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final __name__[] data =
+                baseReader.h5.readAttributeAs__Name__Array(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MD__Name__Array get__Name__MDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(__Memorytype__,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = __Memorytype__;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final __name__[] data =
+                    baseReader.h5.readAttributeAs__Name__Array(attributeId,
+                            memoryTypeId, len);
+            return new MD__Name__Array(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
\ No newline at end of file
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveWriter.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveWriter.java.templ
new file mode 100644
index 0000000..b066bae
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5PrimitiveWriter.java.templ
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5__Classname__StorageFeatures.__CLASSNAME___NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.__Memorytype__;
+__StoragetypeImport__
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5__Name__Writer}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5__Name__Writer extends HDF5__Name__Reader implements IHDF5__Name__Writer
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5__Name__Writer(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final __name__ value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
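+                            // Attribute layout choice: an explicit rank-1 data space
+                            // of length 1 when simple data spaces are configured,
+                            // otherwise -1, i.e. no pre-created data space.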
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, __Storagetype__,
+                                        __Memorytype__, dataSpaceId, new __name__[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, __Storagetype__,
+                                        __Memorytype__, -1, new __name__[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final __name__[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, __Storagetype__, __Memorytype__,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
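+                        // Without a simple data space, memory and storage
+                        // types are HDF5 array types of length value.length.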
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(__Memorytype__, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(__Storagetype__, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MD__Name__Array value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, __Storagetype__, __Memorytype__,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(__Memorytype__, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(__Storagetype__, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final __name__[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MD__Name__Array(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final __name__ value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, __Storagetype__, __Memorytype__, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final __name__[] data)
+    {
+        writeArray(objectPath, data, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final __name__[] data,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                __FeatureBasedStoragetype__, new long[]
+                                { data.length }, __elementsize__, features, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                            features, new long[] { 0 }, new long[] { size }, __elementsize__, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                            features, new long[] { size }, null, __elementsize__, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, __elementsize__, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final __name__[] data,
+            final long blockNumber)
+    {
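+        // The block size is taken from data.length, so block number N starts
+        // at offset N * data.length.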
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final __name__[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
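+                    // Extend the data set (if needed) to cover
+                    // [0, offset + dataSize), then select the matching
+                    // hyperslab in the file and write the block into it.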
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final __name__[][] data)
+    {
+        writeMatrix(objectPath, data, __CLASSNAME___NO_COMPRESSION);
+    }
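+
+    // Hypothetical usage of a generated writer, assuming the float
+    // instantiation of this template (names are illustrative):
+    //   floatWriter.writeMatrix("/mydata", new float[][] { { 1f, 2f }, { 3f, 4f } });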
+
+    @Override
+    public void writeMatrix(final String objectPath, final __name__[][] data, 
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MD__Name__Array(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final __name__[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MD__Name__Array(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final __name__[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final __name__[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MD__Name__Array data)
+    {
+        writeMDArray(objectPath, data, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MD__Name__Array data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MD__Name__Array data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MD__Name__Array data,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, __FeatureBasedStoragetype__, 
+                                    data.longDimensions(), __elementsize__, features, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), __elementsize__, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                                features, MDArray.toLong(dimensions), null, __elementsize__, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), __elementsize__, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MD__Name__Array data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
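+        // Translate the per-dimension block number into an absolute element
+        // offset: offset[i] = blockNumber[i] * dimensions[i].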
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MD__Name__Array data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MD__Name__Array data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MD__Name__Array data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MD__Name__Array data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, __Memorytype__, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
\ No newline at end of file
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveReader.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveReader.java.templ
new file mode 100644
index 0000000..3749bd6
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveReader.java.templ
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import static ch.systemsx.cisd.hdf5.MatrixUtils.cardinalityBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.checkBoundIndices;
+import static ch.systemsx.cisd.hdf5.MatrixUtils.createFullBlockDimensionsAndOffset;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5T_ARRAY;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.__Memorytype__;
+
+import java.util.Arrays;
+import java.util.Iterator;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.exceptions.HDF5SpaceRankMismatch;
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+import ch.systemsx.cisd.hdf5.HDF5BaseReader.DataSpaceParameters;
+import ch.systemsx.cisd.hdf5.HDF5DataTypeInformation.DataTypeInfoOptions;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+import ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants;
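+
+// As with the writer template above, this file is a code-generation template;
+// the __Name__/__name__/__Wrappername__/__Memorytype__ placeholders are
+// substituted with a concrete primitive type when the unsigned reader classes
+// are generated.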
+
+/**
+ * The unsigned-value implementation of {@link IHDF5__Name__Reader}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5Unsigned__Name__Reader implements IHDF5__Name__Reader
+{
+    private final HDF5BaseReader baseReader;
+
+    HDF5Unsigned__Name__Reader(HDF5BaseReader baseReader)
+    {
+        assert baseReader != null;
+
+        this.baseReader = baseReader;
+    }
+
+    // For unit tests only.
+    HDF5BaseReader getBaseReader()
+    {
+        return baseReader;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public __name__ getAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__Wrappername__> getAttributeRunnable = new ICallableWithCleanUp<__Wrappername__>()
+            {
+                @Override
+                public __Wrappername__ call(ICleanUpRegistry registry)
+                {
+                    final int objectId =
+                            baseReader.h5.openObject(baseReader.fileId, objectPath, registry);
+                    final int attributeId =
+                            baseReader.h5.openAttribute(objectId, attributeName, registry);
+                    final __name__[] data =
+                            baseReader.h5.readAttributeAs__Name__Array(attributeId, __Memorytype__, 1);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public __name__[] getArrayAttr(final String objectPath, final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__name__[]> getAttributeRunnable =
+                new ICallableWithCleanUp<__name__[]>()
+                    {
+                        @Override
+                        public __name__[] call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return get__Name__ArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public MD__Name__Array getMDArrayAttr(final String objectPath,
+            final String attributeName)
+    {
+        assert objectPath != null;
+        assert attributeName != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MD__Name__Array> getAttributeRunnable =
+                new ICallableWithCleanUp<MD__Name__Array>()
+                    {
+                        @Override
+                        public MD__Name__Array call(ICleanUpRegistry registry)
+                        {
+                            final int objectId =
+                                    baseReader.h5.openObject(baseReader.fileId, objectPath,
+                                            registry);
+                            return get__Name__MDArrayAttribute(objectId, attributeName, registry);
+                        }
+                    };
+        return baseReader.runner.call(getAttributeRunnable);
+    }
+
+    @Override
+    public __name__[][] getMatrixAttr(final String objectPath, final String attributeName)
+            throws HDF5JavaException
+    {
+        final MD__Name__Array array = getMDArrayAttr(objectPath, attributeName);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public __name__ read(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__Wrappername__> readCallable = new ICallableWithCleanUp<__Wrappername__>()
+            {
+                @Override
+                public __Wrappername__ call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final __name__[] data = new __name__[1];
+                    baseReader.h5.readDataSet(dataSetId, __Memorytype__, data);
+                    return data[0];
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public __name__[] readArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__name__[]> readCallable = new ICallableWithCleanUp<__name__[]>()
+            {
+                @Override
+                public __name__[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return read__Name__Array(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    private __name__[] read__Name__Array(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final __name__[] data = new __name__[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, __Memorytype__, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return data;
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return read__Name__ArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private __name__[] read__Name__ArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
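+        // The data set stores its values as one element of an HDF5 array type,
+        // so it is read through a scalar data space with an in-memory array
+        // type of the same total length.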
+        final int spaceId = baseReader.h5.createScalarDataSpace();
+        final int[] dimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final __name__[] data = new __name__[HDF5Utils.getOneDimensionalArraySize(dimensions)];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(__Memorytype__, data.length, registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+        return data;
+    }
+
+    @Override
+    public int[] readToMDArrayWithOffset(final String objectPath, final MD__Name__Array array,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, __Memorytype__, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                            array.getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public int[] readToMDArrayBlockWithOffset(final String objectPath,
+            final MD__Name__Array array, final int[] blockDimensions, final long[] offset,
+            final int[] memoryOffset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<int[]> readCallable = new ICallableWithCleanUp<int[]>()
+            {
+                @Override
+                public int[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getBlockSpaceParameters(dataSetId, memoryOffset, array
+                                    .dimensions(), offset, blockDimensions, registry);
+                    final int nativeDataTypeId =
+                            baseReader.getNativeDataTypeId(dataSetId, __Memorytype__, registry);
+                    baseReader.h5.readDataSet(dataSetId, nativeDataTypeId, 
+                            spaceParams.memorySpaceId, spaceParams.dataSpaceId, array
+                            .getAsFlatArray());
+                    return MDArray.toInt(spaceParams.dimensions);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public __name__[] readArrayBlock(final String objectPath, final int blockSize,
+            final long blockNumber)
+    {
+        return readArrayBlockWithOffset(objectPath, blockSize, blockNumber * blockSize);
+    }
+
+    @Override
+    public __name__[] readArrayBlockWithOffset(final String objectPath, final int blockSize,
+            final long offset)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<__name__[]> readCallable = new ICallableWithCleanUp<__name__[]>()
+            {
+                @Override
+                public __name__[] call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    final DataSpaceParameters spaceParams =
+                            baseReader.getSpaceParameters(dataSetId, offset, blockSize, registry);
+                    final __name__[] data = new __name__[spaceParams.blockSize];
+                    baseReader.h5.readDataSet(dataSetId, __Memorytype__, spaceParams.memorySpaceId,
+                            spaceParams.dataSpaceId, data);
+                    return data;
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    @Override
+    public __name__[][] readMatrix(final String objectPath) throws HDF5JavaException
+    {
+        final MD__Name__Array array = readMDArray(objectPath);
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public __name__[][] readMatrixBlock(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long blockNumberX, final long blockNumberY) 
+            throws HDF5JavaException
+    {
+        final MD__Name__Array array = readMDArrayBlock(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { blockNumberX, blockNumberY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public __name__[][] readMatrixBlockWithOffset(final String objectPath, final int blockSizeX,
+            final int blockSizeY, final long offsetX, final long offsetY) throws HDF5JavaException
+    {
+        final MD__Name__Array array = readMDArrayBlockWithOffset(objectPath, new int[]
+            { blockSizeX, blockSizeY }, new long[]
+            { offsetX, offsetY });
+        if (array.rank() != 2)
+        {
+            throw new HDF5JavaException("Array is supposed to be of rank 2, but is of rank "
+                    + array.rank());
+        }
+        return array.toMatrix();
+    }
+
+    @Override
+    public MD__Name__Array readMDArraySlice(String objectPath, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
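+        // Bind the indices given in boundIndices to fixed values and read the
+        // full extent (block dimension -1) of every remaining free dimension.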
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, cardBoundIndices);
+        final int[] effectiveBlockDimensions = new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MD__Name__Array readMDArraySlice(String objectPath, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        final int cardBoundIndices = cardinalityBoundIndices(boundIndices);
+        checkBoundIndices(objectPath, fullDimensions, boundIndices);
+        final int[] effectiveBlockDimensions =
+                new int[fullBlockDimensions.length - cardBoundIndices];
+        Arrays.fill(effectiveBlockDimensions, -1);
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, null, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        if (fullBlockDimensions.length == cardBoundIndices) // no free indices
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), new int[] { 1 });
+        } else
+        {
+            return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+        }
+    }
+
+    @Override
+    public MD__Name__Array readMDArray(final String objectPath)
+    {
+        assert objectPath != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MD__Name__Array> readCallable = new ICallableWithCleanUp<MD__Name__Array>()
+            {
+                @Override
+                public MD__Name__Array call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    return read__Name__MDArray(dataSetId, registry);
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+
+    MD__Name__Array read__Name__MDArray(int dataSetId, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final DataSpaceParameters spaceParams =
+                    baseReader.getSpaceParameters(dataSetId, registry);
+            final __name__[] data = new __name__[spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, __Memorytype__, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MD__Name__Array(data, spaceParams.dimensions);
+        } catch (HDF5LibraryException ex)
+        {
+            if (ex.getMajorErrorNumber() == HDF5Constants.H5E_DATATYPE
+                    && ex.getMinorErrorNumber() == HDF5Constants.H5E_CANTINIT)
+            {
+                // Check whether it is an array data type.
+                final int dataTypeId = baseReader.h5.getDataTypeForDataSet(dataSetId, registry);
+                if (baseReader.h5.getClassType(dataTypeId) == HDF5Constants.H5T_ARRAY)
+                {
+                    return read__Name__MDArrayFromArrayType(dataSetId, dataTypeId, registry);
+                }
+            }
+            throw ex;
+        }
+    }
+
+    private MD__Name__Array read__Name__MDArrayFromArrayType(int dataSetId, final int dataTypeId,
+            ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = baseReader.h5.getArrayDimensions(dataTypeId);
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(__Memorytype__, arrayDimensions, registry);
+        final DataSpaceParameters spaceParams = baseReader.getSpaceParameters(dataSetId, registry);
+        if (spaceParams.blockSize == 0)
+        {
+            final int spaceId = baseReader.h5.createScalarDataSpace();
+            final __name__[] data = new __name__[MDArray.getLength(arrayDimensions)];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceId, spaceId, data);
+            return new MD__Name__Array(data, arrayDimensions);
+        } else
+        {
+            final __name__[] data =
+                    new __name__[MDArray.getLength(arrayDimensions) * spaceParams.blockSize];
+            baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                    spaceParams.dataSpaceId, data);
+            return new MD__Name__Array(data, MatrixUtils.concat(MDArray.toInt(spaceParams.dimensions),
+                    arrayDimensions));
+        }
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readSlicedMDArrayBlockWithOffset(objectPath, blockDimensions, offset, boundIndices);
+    }
+
+    @Override
+    public MD__Name__Array readMDArrayBlock(final String objectPath, final int[] blockDimensions,
+            final long[] blockNumber)
+    {
+        final long[] offset = new long[blockDimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * blockDimensions[i];
+        }
+        return readMDArrayBlockWithOffset(objectPath, blockDimensions, offset);
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MD__Name__Array readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices)
+    {
+        baseReader.checkOpen();
+        final int[] effectiveBlockDimensions = blockDimensions.clone();
+        final long[] fullDimensions = baseReader.getDimensions(objectPath);
+        final int[] fullBlockDimensions = new int[fullDimensions.length];
+        final long[] fullOffset = new long[fullDimensions.length];
+        checkBoundIndices(objectPath, fullDimensions, blockDimensions,
+                cardinalityBoundIndices(boundIndices));
+        createFullBlockDimensionsAndOffset(effectiveBlockDimensions, offset, boundIndices, fullDimensions,
+                fullBlockDimensions, fullOffset);
+        final MD__Name__Array result = readMDArrayBlockWithOffset(objectPath, fullBlockDimensions, fullOffset);
+        return new MD__Name__Array(result.getAsFlatArray(), effectiveBlockDimensions);
+    }
+
+    @Override
+    public MD__Name__Array readMDArrayBlockWithOffset(final String objectPath,
+            final int[] blockDimensions, final long[] offset)
+    {
+        assert objectPath != null;
+        assert blockDimensions != null;
+        assert offset != null;
+
+        baseReader.checkOpen();
+        final ICallableWithCleanUp<MD__Name__Array> readCallable = new ICallableWithCleanUp<MD__Name__Array>()
+            {
+                @Override
+                public MD__Name__Array call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId = 
+                            baseReader.h5.openDataSet(baseReader.fileId, objectPath, registry);
+                    try
+                    {
+                        final DataSpaceParameters spaceParams =
+                                baseReader.getSpaceParameters(dataSetId, offset,
+                                        blockDimensions, registry);
+                        final __name__[] dataBlock = new __name__[spaceParams.blockSize];
+                        baseReader.h5.readDataSet(dataSetId, __Memorytype__,
+                                spaceParams.memorySpaceId, spaceParams.dataSpaceId,
+                                dataBlock);
+                        return new MD__Name__Array(dataBlock, spaceParams.dimensions);
+                    } catch (HDF5SpaceRankMismatch ex)
+                    {
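+                        // The rank of the file data space differs from the
+                        // requested rank; if the difference equals the rank of
+                        // the element's array data type, re-read the block as
+                        // a block of arrays.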
+                        final HDF5DataSetInformation info =
+                                baseReader.getDataSetInformation(objectPath,
+                                        DataTypeInfoOptions.MINIMAL, false);
+                        if (ex.getSpaceRankExpected() - ex.getSpaceRankFound() == info
+                                .getTypeInformation().getRank())
+                        {
+                            return readMDArrayBlockOfArrays(dataSetId, blockDimensions,
+                                    offset, info, ex.getSpaceRankFound(), registry);
+                        } else
+                        {
+                            throw ex;
+                        }
+                    }
+                }
+            };
+        return baseReader.runner.call(readCallable);
+    }
+    
+    private MD__Name__Array readMDArrayBlockOfArrays(final int dataSetId, final int[] blockDimensions,
+            final long[] offset, final HDF5DataSetInformation info, final int spaceRank,
+            final ICleanUpRegistry registry)
+    {
+        final int[] arrayDimensions = info.getTypeInformation().getDimensions();
+        int[] effectiveBlockDimensions = blockDimensions;
+        // Block-wise reading of array type data sets is not supported; check
+        // that no partial block of the array dimensions is requested and bail
+        // out otherwise.
+        for (int i = 0; i < arrayDimensions.length; ++i)
+        {
+            final int j = spaceRank + i;
+            if (effectiveBlockDimensions[j] < 0)
+            {
+                if (effectiveBlockDimensions == blockDimensions)
+                {
+                    effectiveBlockDimensions = blockDimensions.clone();
+                }
+                effectiveBlockDimensions[j] = arrayDimensions[i];
+            }
+            if (effectiveBlockDimensions[j] != arrayDimensions[i])
+            {
+                throw new HDF5JavaException(
+                        "Block-wise reading of array type data sets is not supported.");
+            }
+        }
+        final int[] spaceBlockDimensions = Arrays.copyOfRange(effectiveBlockDimensions, 0, spaceRank);
+        final long[] spaceOfs = Arrays.copyOfRange(offset, 0, spaceRank);
+        final DataSpaceParameters spaceParams =
+                baseReader.getSpaceParameters(dataSetId, spaceOfs, spaceBlockDimensions, registry);
+        final __name__[] dataBlock =
+                new __name__[spaceParams.blockSize * info.getTypeInformation().getNumberOfElements()];
+        final int memoryDataTypeId =
+                baseReader.h5.createArrayType(__Memorytype__, info.getTypeInformation()
+                        .getDimensions(), registry);
+        baseReader.h5.readDataSet(dataSetId, memoryDataTypeId, spaceParams.memorySpaceId,
+                spaceParams.dataSpaceId, dataBlock);
+        return new MD__Name__Array(dataBlock, effectiveBlockDimensions);
+    }
+
+    @Override
+    public Iterable<HDF5DataBlock<__name__[]>> getArrayNaturalBlocks(final String dataSetPath)
+            throws HDF5JavaException
+    {
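+        // Iterate over the data set in its "natural" block size, which is
+        // presumably the chunk size for chunked data sets.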
+        baseReader.checkOpen();
+        final HDF5NaturalBlock1DParameters params =
+                new HDF5NaturalBlock1DParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5DataBlock<__name__[]>>()
+            {
+                @Override
+                public Iterator<HDF5DataBlock<__name__[]>> iterator()
+                {
+                    return new Iterator<HDF5DataBlock<__name__[]>>()
+                        {
+                            final HDF5NaturalBlock1DParameters.HDF5NaturalBlock1DIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5DataBlock<__name__[]> next()
+                            {
+                                final long offset = index.computeOffsetAndSizeGetOffset();
+                                final __name__[] block =
+                                        readArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5DataBlock<__name__[]>(block, index.getAndIncIndex(), 
+                                        offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    @Override
+    public Iterable<HDF5MDDataBlock<MD__Name__Array>> getMDArrayNaturalBlocks(final String dataSetPath)
+    {
+        baseReader.checkOpen();
+        final HDF5NaturalBlockMDParameters params =
+                new HDF5NaturalBlockMDParameters(baseReader.getDataSetInformation(dataSetPath));
+
+        return new Iterable<HDF5MDDataBlock<MD__Name__Array>>()
+            {
+                @Override
+                public Iterator<HDF5MDDataBlock<MD__Name__Array>> iterator()
+                {
+                    return new Iterator<HDF5MDDataBlock<MD__Name__Array>>()
+                        {
+                            final HDF5NaturalBlockMDParameters.HDF5NaturalBlockMDIndex index =
+                                    params.getNaturalBlockIndex();
+
+                            @Override
+                            public boolean hasNext()
+                            {
+                                return index.hasNext();
+                            }
+
+                            @Override
+                            public HDF5MDDataBlock<MD__Name__Array> next()
+                            {
+                                final long[] offset = index.computeOffsetAndSizeGetOffsetClone();
+                                final MD__Name__Array data =
+                                        readMDArrayBlockWithOffset(dataSetPath, index
+                                                .getBlockSize(), offset);
+                                return new HDF5MDDataBlock<MD__Name__Array>(data, index
+                                        .getIndexClone(), offset);
+                            }
+
+                            @Override
+                            public void remove()
+                            {
+                                throw new UnsupportedOperationException();
+                            }
+                        };
+                }
+            };
+    }
+
+    __name__[] get__Name__ArrayAttribute(final int objectId, final String attributeName,
+            ICleanUpRegistry registry)
+    {
+        final int attributeId =
+                baseReader.h5.openAttribute(objectId, attributeName, registry);
+        final int attributeTypeId =
+                baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+        final int memoryTypeId;
+        final int len;
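+        // An attribute may be stored either as an HDF5 array type (H5T_ARRAY) of rank 1
+        // or as a simple data space of scalars; both cases are read into a flat Java array.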
+        if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+        {
+            final int[] arrayDimensions =
+                    baseReader.h5.getArrayDimensions(attributeTypeId);
+            if (arrayDimensions.length != 1)
+            {
+                throw new HDF5JavaException(
+                        "Array needs to be of rank 1, but is of rank "
+                                + arrayDimensions.length);
+            }
+            len = arrayDimensions[0];
+            memoryTypeId =
+                    baseReader.h5.createArrayType(__Memorytype__, len,
+                            registry);
+        } else
+        {
+            final long[] arrayDimensions =
+                    baseReader.h5.getDataDimensionsForAttribute(attributeId,
+                            registry);
+            memoryTypeId = __Memorytype__;
+            len = HDF5Utils.getOneDimensionalArraySize(arrayDimensions);
+        }
+        final __name__[] data =
+                baseReader.h5.readAttributeAs__Name__Array(attributeId,
+                        memoryTypeId, len);
+        return data;
+    }
+
+    MD__Name__Array get__Name__MDArrayAttribute(final int objectId,
+            final String attributeName, ICleanUpRegistry registry)
+    {
+        try
+        {
+            final int attributeId =
+                    baseReader.h5.openAttribute(objectId, attributeName, registry);
+            final int attributeTypeId =
+                    baseReader.h5.getDataTypeForAttribute(attributeId, registry);
+            final int memoryTypeId;
+            final int[] arrayDimensions;
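+            // As in the rank-1 case above: the attribute is stored either as an
+            // H5T_ARRAY type or as a simple data space, and the dimensions are
+            // recovered accordingly before reading the flat array.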
+            if (baseReader.h5.getClassType(attributeTypeId) == H5T_ARRAY)
+            {
+                arrayDimensions = baseReader.h5.getArrayDimensions(attributeTypeId);
+                memoryTypeId =
+                        baseReader.h5.createArrayType(__Memorytype__,
+                                arrayDimensions, registry);
+            } else
+            {
+                arrayDimensions =
+                        MDArray.toInt(baseReader.h5.getDataDimensionsForAttribute(
+                                attributeId, registry));
+                memoryTypeId = __Memorytype__;
+            }
+            final int len = MDArray.getLength(arrayDimensions);
+            final __name__[] data =
+                    baseReader.h5.readAttributeAs__Name__Array(attributeId,
+                            memoryTypeId, len);
+            return new MD__Name__Array(data, arrayDimensions);
+        } catch (IllegalArgumentException ex)
+        {
+            throw new HDF5JavaException(ex.getMessage());
+        }
+    }
+}
\ No newline at end of file
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveWriter.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveWriter.java.templ
new file mode 100644
index 0000000..cd7168b
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/HDF5UnsignedPrimitiveWriter.java.templ
@@ -0,0 +1,702 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+
+import static ch.systemsx.cisd.hdf5.HDF5__Classname__StorageFeatures.__CLASSNAME___NO_COMPRESSION;
+import static ch.systemsx.cisd.hdf5.hdf5lib.H5D.H5Dwrite;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5P_DEFAULT;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.H5S_ALL;
+import static ch.systemsx.cisd.hdf5.hdf5lib.HDF5Constants.__Memorytype__;
+__StoragetypeImport__
+
+import ch.systemsx.cisd.base.mdarray.MDArray;
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+import ch.systemsx.cisd.hdf5.cleanup.ICallableWithCleanUp;
+import ch.systemsx.cisd.hdf5.cleanup.ICleanUpRegistry;
+
+/**
+ * The implementation of {@link IHDF5__Name__Writer}.
+ * 
+ * @author Bernd Rinn
+ */
+class HDF5Unsigned__Name__Writer extends HDF5Unsigned__Name__Reader implements IHDF5__Name__Writer
+{
+    private final HDF5BaseWriter baseWriter;
+
+    HDF5Unsigned__Name__Writer(HDF5BaseWriter baseWriter)
+    {
+        super(baseWriter);
+        assert baseWriter != null;
+
+        this.baseWriter = baseWriter;
+    }
+
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    @Override
+    public void setAttr(final String objectPath, final String name, final __name__ value)
+    {
+        assert objectPath != null;
+        assert name != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Object> addAttributeRunnable =
+                new ICallableWithCleanUp<Object>()
+                    {
+                        @Override
+                        public Object call(ICleanUpRegistry registry)
+                        {
+                            if (baseWriter.useSimpleDataSpaceForAttributes)
+                            {
+                                final int dataSpaceId =
+                                        baseWriter.h5.createSimpleDataSpace(new long[]
+                                            { 1 }, registry);
+                                baseWriter.setAttribute(objectPath, name, __Storagetype__,
+                                        __Memorytype__, dataSpaceId, new __name__[]
+                                            { value }, registry);
+                            } else
+                            {
+                                baseWriter.setAttribute(objectPath, name, __Storagetype__,
+                                        __Memorytype__, -1, new __name__[]
+                                            { value }, registry);
+                            }
+                            return null; // Nothing to return.
+                        }
+                    };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setArrayAttr(final String objectPath, final String name,
+            final __name__[] value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> setAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId = baseWriter.h5.createSimpleDataSpace(new long[]
+                            { value.length }, registry);
+                        baseWriter.setAttribute(objectPath, name, __Storagetype__, __Memorytype__,
+                                dataSpaceId, value, registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(__Memorytype__, value.length, registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(__Storagetype__, value.length, registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1, value, 
+                                registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(setAttributeRunnable);
+    }
+
+    @Override
+    public void setMDArrayAttr(final String objectPath, final String name,
+            final MD__Name__Array value)
+    {
+        assert objectPath != null;
+        assert name != null;
+        assert value != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> addAttributeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (baseWriter.useSimpleDataSpaceForAttributes)
+                    {
+                        final int dataSpaceId =
+                                baseWriter.h5.createSimpleDataSpace(value.longDimensions(), registry);
+                        baseWriter.setAttribute(objectPath, name, __Storagetype__, __Memorytype__,
+                                dataSpaceId, value.getAsFlatArray(), registry);
+                    } else
+                    {
+                        final int memoryTypeId =
+                                baseWriter.h5.createArrayType(__Memorytype__, value.dimensions(),
+                                        registry);
+                        final int storageTypeId =
+                                baseWriter.h5.createArrayType(__Storagetype__, value.dimensions(),
+                                        registry);
+                        baseWriter.setAttribute(objectPath, name, storageTypeId, memoryTypeId, -1,
+                                value.getAsFlatArray(), registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(addAttributeRunnable);
+    }
+
+    @Override
+    public void setMatrixAttr(final String objectPath, final String name,
+            final __name__[][] value)
+    {
+        setMDArrayAttr(objectPath, name, new MD__Name__Array(value));
+    }
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    @Override
+    public void write(final String objectPath, final __name__ value)
+    {
+        assert objectPath != null;
+
+        baseWriter.checkOpen();
+        baseWriter.writeScalar(objectPath, __Storagetype__, __Memorytype__, value);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final __name__[] data)
+    {
+        writeArray(objectPath, data, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeArray(final String objectPath, final __name__[] data,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, 
+                                __FeatureBasedStoragetype__, new long[]
+                                { data.length }, __elementsize__, features, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size)
+    {
+        createArray(objectPath, size, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize)
+    {
+        createArray(objectPath, size, blockSize, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final int size,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                            features, new long[] { 0 }, new long[] { size }, __elementsize__, registry);
+
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                            features, new long[] { size }, null, __elementsize__, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createArray(final String objectPath, final long size, final int blockSize,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert size >= 0;
+        assert blockSize >= 0 && (blockSize <= size || size == 0);
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                        features, new long[] { size }, new long[]
+                        { blockSize }, __elementsize__, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeArrayBlock(final String objectPath, final __name__[] data,
+            final long blockNumber)
+    {
+        writeArrayBlockWithOffset(objectPath, data, data.length, data.length * blockNumber);
+    }
+
+    @Override
+    public void writeArrayBlockWithOffset(final String objectPath, final __name__[] data,
+            final int dataSize, final long offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] blockDimensions = new long[]
+                        { dataSize };
+                    final long[] slabStartOrNull = new long[]
+                        { offset };
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, new long[]
+                                        { offset + dataSize }, -1, registry);
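+                    // Select the target region in the file (a hyperslab of dataSize
+                    // elements starting at offset) and a matching in-memory data space.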
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, slabStartOrNull, blockDimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(blockDimensions, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    /**
+     * Writes out a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    @Override
+    public void writeMatrix(final String objectPath, final __name__[][] data)
+    {
+        writeMatrix(objectPath, data, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMatrix(final String objectPath, final __name__[][] data, 
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert HDF5Utils.areMatrixDimensionsConsistent(data);
+
+        writeMDArray(objectPath, new MD__Name__Array(data), features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final int sizeX, 
+            final int sizeY, final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+
+        createMDArray(objectPath, new int[] { sizeX, sizeY }, features);
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY });
+    }
+
+    @Override
+    public void createMatrix(final String objectPath, final long sizeX, final long sizeY,
+            final int blockSizeX, final int blockSizeY, final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert sizeX >= 0;
+        assert sizeY >= 0;
+        assert blockSizeX >= 0 && (blockSizeX <= sizeX || sizeX == 0);
+        assert blockSizeY >= 0 && (blockSizeY <= sizeY || sizeY == 0);
+
+        createMDArray(objectPath, new long[] { sizeX, sizeY }, new int[] { blockSizeX, blockSizeY }, features);
+    }
+
+    @Override
+    public void writeMatrixBlock(final String objectPath, final __name__[][] data,
+            final long blockNumberX, final long blockNumberY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlock(objectPath, new MD__Name__Array(data), new long[]
+            { blockNumberX, blockNumberY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final __name__[][] data,
+            final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data, new int[]
+            { data.length, data[0].length }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMatrixBlockWithOffset(final String objectPath, final __name__[][] data,
+            final int dataSizeX, final int dataSizeY, final long offsetX, final long offsetY)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data, new int[]
+            { dataSizeX, dataSizeY }), new long[]
+            { offsetX, offsetY });
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MD__Name__Array data)
+    {
+        writeMDArray(objectPath, data, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MD__Name__Array data, IndexMap boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArraySlice(String objectPath, MD__Name__Array data, long[] boundIndices)
+    {
+        baseWriter.checkOpen();
+
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), null, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArray(final String objectPath, final MD__Name__Array data,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert data != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final int dataSetId =
+                            baseWriter.getOrCreateDataSetId(objectPath, __FeatureBasedStoragetype__, 
+                                    data.longDimensions(), __elementsize__, features, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, H5S_ALL, H5S_ALL, H5P_DEFAULT, 
+                            data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions)
+    {
+        createMDArray(objectPath, dimensions, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions)
+    {
+        createMDArray(objectPath, dimensions, blockDimensions, __CLASSNAME___NO_COMPRESSION);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final int[] dimensions,
+            final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    if (features.requiresChunking())
+                    {
+                        final long[] nullDimensions = new long[dimensions.length];
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                                features,
+                                nullDimensions, MDArray.toLong(dimensions), __elementsize__, registry);
+                    } else
+                    {
+                        baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                                features, MDArray.toLong(dimensions), null, __elementsize__, registry);
+                    }
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void createMDArray(final String objectPath, final long[] dimensions,
+            final int[] blockDimensions, final HDF5__Classname__StorageFeatures features)
+    {
+        assert objectPath != null;
+        assert dimensions != null;
+        assert blockDimensions != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> createRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    baseWriter.createDataSet(objectPath, __FeatureBasedStoragetype__, 
+                            features, dimensions, 
+                            MDArray.toLong(blockDimensions), __elementsize__, registry);
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(createRunnable);
+    }
+
+    @Override
+    public void writeMDArrayBlock(final String objectPath, final MD__Name__Array data,
+            final long[] blockNumber)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeMDArrayBlockWithOffset(objectPath, data, offset);
+    }
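+
+    // Worked example of the block-number arithmetic above (assumed block dimensions
+    // of 10 x 20): block number { 2, 1 } is written at offset { 20, 20 }.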
+
+    @Override
+    public void writeSlicedMDArrayBlock(final String objectPath, final MD__Name__Array data,
+            final long[] blockNumber, IndexMap boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+    
+    @Override
+    public void writeSlicedMDArrayBlock(String objectPath, MD__Name__Array data, long[] blockNumber,
+            long[] boundIndices)
+    {
+        assert blockNumber != null;
+
+        final long[] dimensions = data.longDimensions();
+        final long[] offset = new long[dimensions.length];
+        for (int i = 0; i < offset.length; ++i)
+        {
+            offset[i] = blockNumber[i] * dimensions[i];
+        }
+        writeSlicedMDArrayBlockWithOffset(objectPath, data, offset, boundIndices);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MD__Name__Array data,
+            final long[] offset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] dimensions = data.longDimensions();
+                    assert dimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[dimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + dimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, dimensions);
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(dimensions, registry);
+                    H5Dwrite(dataSetId, __Memorytype__, memorySpaceId, dataSpaceId, 
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset, IndexMap boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset, long[] boundIndices)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final int fullRank = baseWriter.getRank(objectPath);
+        final int[] fullBlockDimensions = new int[fullRank];
+        final long[] fullOffset = new long[fullRank];
+        MatrixUtils.createFullBlockDimensionsAndOffset(data.dimensions(), offset, boundIndices,
+                fullRank, fullBlockDimensions, fullOffset);
+        writeMDArrayBlockWithOffset(objectPath, new MD__Name__Array(data.getAsFlatArray(),
+                fullBlockDimensions), fullOffset);
+    }
+
+    @Override
+    public void writeMDArrayBlockWithOffset(final String objectPath, final MD__Name__Array data,
+            final int[] blockDimensions, final long[] offset, final int[] memoryOffset)
+    {
+        assert objectPath != null;
+        assert data != null;
+        assert offset != null;
+
+        baseWriter.checkOpen();
+        final ICallableWithCleanUp<Void> writeRunnable = new ICallableWithCleanUp<Void>()
+            {
+                @Override
+                public Void call(ICleanUpRegistry registry)
+                {
+                    final long[] memoryDimensions = data.longDimensions();
+                    assert memoryDimensions.length == offset.length;
+                    final long[] longBlockDimensions = MDArray.toLong(blockDimensions);
+                    assert longBlockDimensions.length == offset.length;
+                    final long[] dataSetDimensions = new long[blockDimensions.length];
+                    for (int i = 0; i < offset.length; ++i)
+                    {
+                        dataSetDimensions[i] = offset[i] + blockDimensions[i];
+                    }
+                    final int dataSetId =
+                            baseWriter.h5.openAndExtendDataSet(baseWriter.fileId, objectPath,
+                                    baseWriter.fileFormat, dataSetDimensions, -1, registry);
+                    final int dataSpaceId = 
+                            baseWriter.h5.getDataSpaceForDataSet(dataSetId, registry);
+                    baseWriter.h5.setHyperslabBlock(dataSpaceId, offset, longBlockDimensions);
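+                    // Unlike the plain block write, a hyperslab is also selected on the
+                    // memory side, so only the sub-block of "data" starting at
+                    // memoryOffset is transferred.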
+                    final int memorySpaceId = 
+                            baseWriter.h5.createSimpleDataSpace(memoryDimensions, registry);
+                    baseWriter.h5.setHyperslabBlock(memorySpaceId, MDArray.toLong(memoryOffset),
+                            longBlockDimensions);
+                    H5Dwrite(dataSetId, __Memorytype__, memorySpaceId, dataSpaceId,
+                            H5P_DEFAULT, data.getAsFlatArray());
+                    return null; // Nothing to return.
+                }
+            };
+        baseWriter.runner.call(writeRunnable);
+    }
+}
\ No newline at end of file
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveReader.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveReader.java.templ
new file mode 100644
index 0000000..5fbaeb1
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveReader.java.templ
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ncsa.hdf.hdf5lib.exceptions.HDF5JavaException;
+
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+
+/**
+ * An interface that provides methods for reading <code>__name__</code> values from HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and a single write / read access will suffice.
+ * <p>
+ * <i>Note:</i> If the values read are unsigned, use the methods in {@link UnsignedIntUtils} to convert 
+ * to a larger Java integer type that can hold all values as unsigned.
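+ * <p>
+ * For example (hypothetical sizes): if a data set was created with a chunk size of 10000,
+ * reading it block-wise with a block size of 10000 touches exactly one chunk per access,
+ * whereas a block size of 15000 requires reading two chunks per access.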
+ * 
+ * @author Bernd Rinn
+ */
+public interface IHDF5__Name__Reader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Reads a <code>__name__</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public __name__ getAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a <code>__name__[]</code> attribute named <var>attributeName</var> from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute value read from the data set.
+     */
+    public __name__[] getArrayAttr(String objectPath, String attributeName);
+
+    /**
+     * Reads a multi-dimensional array <code>__name__</code> attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute array value read from the data set.
+     */
+    public MD__Name__Array getMDArrayAttr(String objectPath,
+            String attributeName);
+
+    /**
+     * Reads a <code>__name__</code> matrix attribute named <var>attributeName</var>
+     * from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param attributeName The name of the attribute to read.
+     * @return The attribute matrix value read from the data set.
+     */
+    public __name__[][] getMatrixAttr(String objectPath, String attributeName)
+            throws HDF5JavaException;
+
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Reads a <code>__name__</code> value from the data set <var>objectPath</var>. This method 
+     * doesn't check the data space but simply reads the first value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The value read from the data set.
+     */
+    public __name__ read(String objectPath);
+
+    /**
+     * Reads a <code>__name__</code> array (of rank 1) from the data set <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public __name__[] readArray(String objectPath);
+
+    /**
+     * Reads a multi-dimensional <code>__name__</code> array data set <var>objectPath</var>
+     * into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param memoryOffset The offset in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayWithOffset(String objectPath,
+            MD__Name__Array array, int[] memoryOffset);
+
+    /**
+     * Reads a block of the multi-dimensional <code>__name__</code> array data set
+     * <var>objectPath</var> into a given <var>array</var> in memory.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param array The array to read the data into.
+     * @param blockDimensions The size of the block to read along each axis.
+     * @param offset The offset of the block in the data set.
+     * @param memoryOffset The offset of the block in the array to write the data to.
+     * @return The effective dimensions of the block in <var>array</var> that was filled.
+     */
+    public int[] readToMDArrayBlockWithOffset(String objectPath,
+            MD__Name__Array array, int[] blockDimensions, long[] offset,
+            int[] memoryOffset);
+
+    /**
+     * Reads a block from a <code>__name__</code> array (of rank 1) from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>__name__[]</code> returned
+     *            if the data set is long enough).
+     * @param blockNumber The number of the block to read (starting with 0, offset: multiply with
+     *            <var>blockSize</var>).
+     * @return The data read from the data set. The length will be min(size - blockSize*blockNumber,
+     *         blockSize).
+     */
+    public __name__[] readArrayBlock(String objectPath, int blockSize,
+            long blockNumber);
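+
+    // Block-number arithmetic sketch (hypothetical data set of 25 elements):
+    //   readArrayBlock("/ds", 10, 2) reads elements 20..24 and returns an array of length 5.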
+
+    /**
+     * Reads a block from <code>__name__</code> array (of rank 1) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSize The block size (this will be the length of the <code>__name__[]</code>
+     *            returned).
+     * @param offset The offset of the block in the data set to start reading from (starting with 0).
+     * @return The data block read from the data set.
+     */
+    public __name__[] readArrayBlockWithOffset(String objectPath, int blockSize,
+            long offset);
+
+    /**
+     * Reads a <code>__name__</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public __name__[][] readMatrix(String objectPath) throws HDF5JavaException;
+
+    /**
+     * Reads a <code>__name__</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>blockSizeX</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>blockSizeY</code>).
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public __name__[][] readMatrixBlock(String objectPath, int blockSizeX,
+            int blockSizeY, long blockNumberX, long blockNumberY) 
+            throws HDF5JavaException;
+
+    /**
+     * Reads a <code>__name__</code> matrix (array of arrays) from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockSizeX The size of the block in the x dimension.
+     * @param blockSizeY The size of the block in the y dimension.
+     * @param offsetX The offset in x dimension in the data set to start reading from.
+     * @param offsetY The offset in y dimension in the data set to start reading from.
+     * @return The data block read from the data set.
+     *
+     * @throws HDF5JavaException If the data set <var>objectPath</var> is not of rank 2.
+     */
+    public __name__[][] readMatrixBlockWithOffset(String objectPath,
+            int blockSizeX, int blockSizeY, long offsetX, long offsetY)
+            throws HDF5JavaException;
+
+    /**
+     * Reads a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @return The data read from the data set.
+     */
+    public MD__Name__Array readMDArray(String objectPath);
+
+    /**
+     * Reads a slice of a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readMDArraySlice(String objectPath, IndexMap boundIndices);
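+
+    // A minimal slicing sketch (hypothetical path; assumes a rank-3 data set):
+    //   MD__Name__Array slice = reader.readMDArraySlice("/ds3d", new IndexMap().mapTo(0, 17));
+    //   // slice has rank 2; index 0 was bound to the value 17.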
+
+    /**
+     * Reads a slice of a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readMDArraySlice(String objectPath, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>__name__</code> array from the data set 
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readMDArrayBlock(String objectPath,
+            int[] blockDimensions, long[] blockNumber);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block from a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param blockNumber The block number in each dimension (offset: multiply with the
+     *            <var>blockDimensions</var> in the according dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readSlicedMDArrayBlock(String objectPath, int[] blockDimensions,
+            long[] blockNumber, long[] boundIndices);
+
+    /**
+     * Reads a block from a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readMDArrayBlockWithOffset(String objectPath,
+            int[] blockDimensions, long[] offset);
+    
+    /**
+     * Reads a sliced block of a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Reads a sliced block of a multi-dimensional <code>__name__</code> array from the data set
+     * <var>objectPath</var>. The slice is defined by "bound indices", each of which is fixed to a
+     * given value. The returned data block only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param blockDimensions The extent of the block in each dimension.
+     * @param offset The offset in the data set to start reading from in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     * @return The data block read from the data set.
+     */
+    public MD__Name__Array readSlicedMDArrayBlockWithOffset(String objectPath, int[] blockDimensions,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Provides all natural blocks of this one-dimensional data set to iterate over.
+     * 
+     * @see HDF5DataBlock
+     * @throws HDF5JavaException If the data set is not of rank 1.
+     */
+    public Iterable<HDF5DataBlock<__name__[]>> getArrayNaturalBlocks(String dataSetPath)
+            throws HDF5JavaException;
+
+    /**
+     * Provides all natural blocks of this multi-dimensional data set to iterate over.
+     * 
+     * @see HDF5MDDataBlock
+     */
+    public Iterable<HDF5MDDataBlock<MD__Name__Array>> getMDArrayNaturalBlocks(String dataSetPath);
+}
\ No newline at end of file
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveWriter.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveWriter.java.templ
new file mode 100644
index 0000000..29fbefc
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5PrimitiveWriter.java.templ
@@ -0,0 +1,546 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+
+/**
+ * An interface that provides methods for writing <code>__name__</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen to be equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and a single write / read access will suffice.
+__NoteUnsigned__ * 
+ * @author Bernd Rinn
+ */
+__SupressWarning__ public interface IHDF5__Name__Writer extends __SuperInterface__
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>__name__</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    __OverrideIfInt__public void setAttr(String objectPath, String name, __name__ value);
+
+    /**
+     * Set a <code>__name__[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    __OverrideIfInt__public void setArrayAttr(String objectPath, String name, __name__[] value);
+
+    /**
+     * Set a multi-dimensional <code>__name__</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    __OverrideIfInt__public void setMDArrayAttr(String objectPath, String name, MD__Name__Array value);
+
+    /**
+     * Set a <code>__name__[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    __OverrideIfInt__public void setMatrixAttr(String objectPath, String name, __name__[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>__name__</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    __OverrideIfInt__public void write(String objectPath, __name__ value);
+
+    /**
+     * Writes out a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    __OverrideIfInt__public void writeArray(String objectPath, __name__[] data);
+
+    /**
+     * Writes out a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void writeArray(String objectPath, __name__[] data, 
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    __OverrideIfInt__public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the __name__ array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    __OverrideIfInt__public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>__name__</code> array to create. When <i>requesting</i> a
+     *            chunked data set (e.g. {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CHUNKED}),
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>.
+     *            When <i>allowing</i> a chunked data set (e.g.
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___NO_COMPRESSION} when the writer is
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a
+     *            non-extendable data set (e.g.
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CONTIGUOUS}), the initial size equals
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void createArray(String objectPath, int size,
+            HDF5__Classname__StorageFeatures features);
+    
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the __name__ array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *          <code>features</code> is <code>HDF5__Classname__StorageFeatures.__CLASSNAME___NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void createArray(String objectPath, long size, int blockSize,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>__name__</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5__Classname__StorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    __OverrideIfInt__public void writeArrayBlock(String objectPath, __name__[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>__name__</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5__Classname__StorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, __name__[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    __OverrideIfInt__public void writeArrayBlockWithOffset(String objectPath, __name__[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    __OverrideIfInt__public void writeMatrix(String objectPath, __name__[][] data);
+
+    /**
+     * Writes out a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void writeMatrix(String objectPath, __name__[][] data, 
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    __OverrideIfInt__public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5__Classname__StorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5__Classname__StorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the __name__ matrix to create.
+     * @param sizeY The size of the y dimension of the __name__ matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    __OverrideIfInt__public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the __name__ matrix to create.
+     * @param sizeY The size of the y dimension of the __name__ matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>__name__</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5__Classname__StorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, __name__[][], long, long)} instead if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    __OverrideIfInt__public void writeMatrixBlock(String objectPath, __name__[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>__name__</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5__Classname__StorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, __name__[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    __OverrideIfInt__public void writeMatrixBlockWithOffset(String objectPath, __name__[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>__name__</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5__Classname__StorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, __name__[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    __OverrideIfInt__public void writeMatrixBlockWithOffset(String objectPath, __name__[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    __OverrideIfInt__public void writeMDArray(String objectPath, MD__Name__Array data);
+
+    /**
+     * Writes out a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void writeMDArray(String objectPath, MD__Name__Array data,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>__name__</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MD__Name__Array data, IndexMap boundIndices);
+
+    /**
+     * Writes out a slice of a multi-dimensional <code>__name__</code> array. The slice is defined by
+     * "bound indices", each of which is fixed to a given value. The <var>data</var> object only  
+     * contains the free (i.e. non-fixed) indices.
+     * <p> 
+     * <i>Note:</i> The object identified by <var>objectPath</var> needs to exist when this method is
+     * called. This method will <i>not</i> create the array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeMDArraySlice(String objectPath, MD__Name__Array data, long[] boundIndices);
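+
+    // Example (a sketch, not an API guarantee): for a rank-6 data set, binding
+    // indices 2 and 4, e.g. with new IndexMap().mapTo(2, 5).mapTo(4, 7) or with
+    // new long[] { -1, -1, 5, -1, 7, -1 }, fixes those two dimensions, so the
+    // MD__Name__Array passed as data has rank 4 and covers only the free
+    // indices 0, 1, 3 and 5.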
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    __OverrideIfInt__public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    __OverrideIfInt__public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>__name__</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g.
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void createMDArray(String objectPath, int[] dimensions,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    __OverrideIfInt__public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     */
+    __OverrideIfInt__public void writeMDArrayBlock(String objectPath, MD__Name__Array data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>__name__</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MD__Name__Array data, long[] blockNumber,
+            IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>__name__</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlock(String objectPath, MD__Name__Array data, long[] blockNumber,
+            long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    __OverrideIfInt__public void writeMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>__name__</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The mapping of indices to index values which should be bound. For example
+     *            a map of <code>new IndexMap().mapTo(2, 5).mapTo(4, 7)</code> has 2 and 4 as bound
+     *            indices and binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset, IndexMap boundIndices);
+
+    /**
+     * Writes out a sliced block of a multi-dimensional <code>__name__</code> array. The slice is
+     * defined by "bound indices", each of which is fixed to a given value. The <var>data</var> 
+     * object only contains the free (i.e. non-fixed) indices.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     * @param boundIndices The array containing the values of the bound indices at the respective
+     *            index positions, and -1 at the free index positions. For example an array of
+     *            <code>new long[] { -1, -1, 5, -1, 7, -1 }</code> has 2 and 4 as bound indices and
+     *            binds them to the values 5 and 7, respectively.
+     */
+    public void writeSlicedMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset, long[] boundIndices);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    __OverrideIfInt__public void writeMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
\ No newline at end of file
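
To make the chunk-alignment note in the interface Javadoc above concrete, here is a
minimal usage sketch for the int instantiation of this template. The HDF5Factory entry
point and the int32() accessor used below are assumptions about the surrounding library
and are not part of this diff:

    import ch.systemsx.cisd.hdf5.HDF5Factory;
    import ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures;
    import ch.systemsx.cisd.hdf5.IHDF5Writer;

    public class BlockWriteSketch
    {
        public static void main(String[] args)
        {
            // Open (or create) an HDF5 file for writing (assumed entry point).
            final IHDF5Writer writer = HDF5Factory.open("example.h5");
            try
            {
                // Total size 1000, block size 100, chunked layout: each block then
                // matches one chunk, so every writeArrayBlock() call is a single
                // consecutive write in the file.
                writer.int32().createArray("/data", 1000L, 100,
                        HDF5IntStorageFeatures.INT_CHUNKED);
                final int[] block = new int[100];
                for (long blockNumber = 0; blockNumber < 10; ++blockNumber)
                {
                    java.util.Arrays.fill(block, (int) blockNumber);
                    writer.int32().writeArrayBlock("/data", block, blockNumber);
                }
            } finally
            {
                writer.close();
            }
        }
    }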
diff --git a/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5UnsignedPrimitiveWriter.java.templ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5UnsignedPrimitiveWriter.java.templ
new file mode 100644
index 0000000..ad9def4
--- /dev/null
+++ b/sourceTest/java/ch/systemsx/cisd/hdf5/tools/IHDF5UnsignedPrimitiveWriter.java.templ
@@ -0,0 +1,447 @@
+/*
+ * Copyright 2007 - 2014 ETH Zuerich, CISD and SIS.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MD__Name__Array;
+
+/**
+ * An interface that provides methods for writing unsigned <code>__name__</code> values to HDF5 files.
+ * <p>
+ * <i>Note:</i> This interface supports block access and sliced access (which is a special case of
+ * block access) to arrays. The performance of this block access can vary greatly depending on how
+ * the data are laid out in the HDF5 file. For best performance, the block (or slice) dimensions should
+ * be chosen equal to the chunk dimensions of the array, as in this case the blocks written / read
+ * are stored as consecutive values in the HDF5 file and one write / read access will suffice.
+ * <p>
+ * <i>Note:</i> Use the methods in {@link UnsignedIntUtils} to convert from and to unsigned values.
+ * 
+ * @deprecated Use {@link IHDF5__Name__Writer} instead; it has all the methods of this interface.
+ * 
+ * @author Bernd Rinn
+ */
+ at Deprecated
+public interface IHDF5Unsigned__Name__Writer extends IHDF5__Name__Reader
+{
+    // /////////////////////
+    // Attributes
+    // /////////////////////
+
+    /**
+     * Set a <code>__name__</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setAttr(String objectPath, String name, __name__ value);
+
+    /**
+     * Set a <code>__name__[]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setArrayAttr(String objectPath, String name, __name__[] value);
+
+    /**
+     * Set a multi-dimensional <code>__name__</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMDArrayAttr(String objectPath, String name, MD__Name__Array value);
+
+    /**
+     * Set a <code>__name__[][]</code> attribute on the referenced object.
+     * <p>
+     * The referenced object must exist, that is, it needs to have been written before by one of the
+     * <code>write()</code> methods.
+     * 
+     * @param objectPath The name of the object to add the attribute to.
+     * @param name The name of the attribute.
+     * @param value The value of the attribute.
+     */
+    public void setMatrixAttr(String objectPath, String name, __name__[][] value);
+    
+    // /////////////////////
+    // Data Sets
+    // /////////////////////
+
+    /**
+     * Writes out a <code>__name__</code> value.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param value The value to write.
+     */
+    public void write(String objectPath, __name__ value);
+
+    /**
+     * Writes out a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     */
+    public void writeArray(String objectPath, __name__[] data);
+
+    /**
+     * Writes out a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param features The storage features of the data set.
+     */
+    public void writeArray(String objectPath, __name__[] data, 
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When the writer is
+     *            configured to <i>enforce</i> a non-extendable data set, the initial size equals the
+     *            total size and will be <var>size</var>.
+     */
+    public void createArray(String objectPath, int size);
+
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the __name__ array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data 
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}).
+     */
+    public void createArray(String objectPath, long size, int blockSize);
+
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the <code>__name__</code> array to create. When <i>requesting</i> a
+     *            chunked data set (e.g. {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CHUNKED}),
+     *            the initial size of the array will be 0 and the chunk size will be <var>size</var>.
+     *            When <i>allowing</i> a chunked data set (e.g.
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___NO_COMPRESSION} when the writer is
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>size</var>. When <i>enforcing</i> a
+     *            non-extendable data set (e.g.
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CONTIGUOUS}), the initial size equals
+     *            the total size and will be <var>size</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, int size,
+            HDF5__Classname__StorageFeatures features);
+    
+    /**
+     * Creates a <code>__name__</code> array (of rank 1).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param size The size of the __name__ array to create. When using extendable data sets
+     *          (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), no data
+     *          set smaller than this size can be created; however, data sets may be larger.
+     * @param blockSize The size of one block (for block-wise IO). Ignored if no extendable data
+     *          sets are used (see {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}) and
+     *          <code>features</code> is <code>HDF5__Classname__StorageFeatures.__CLASSNAME___NO_COMPRESSION</code>.
+     * @param features The storage features of the data set.
+     */
+    public void createArray(String objectPath, long size, int blockSize,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>__name__</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5__Classname__StorageFeatures)}
+     * beforehand.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumber The number of the block to write.
+     */
+    public void writeArrayBlock(String objectPath, __name__[] data,
+            long blockNumber);
+
+    /**
+     * Writes out a block of a <code>__name__</code> array (of rank 1). The data set needs to have
+     * been created by {@link #createArray(String, long, int, HDF5__Classname__StorageFeatures)}
+     * beforehand.
+     * <p>
+     * Use this method instead of {@link #writeArrayBlock(String, __name__[], long)} if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param dataSize The (real) size of <code>data</code> (needs to be
+     *            <code><= data.length</code>).
+     * @param offset The offset in the data set to start writing to.
+     */
+    public void writeArrayBlockWithOffset(String objectPath, __name__[] data,
+            int dataSize, long offset);
+
+    /**
+     * Writes out a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMatrix(String objectPath, __name__[][] data);
+
+    /**
+     * Writes out a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMatrix(String objectPath, __name__[][] data, 
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[])} on the different
+     *            meanings of this parameter.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of one block in the x dimension. See
+     *            {@link #createMDArray(String, int[], HDF5__Classname__StorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param sizeY The size of one block in the y dimension. See
+     *            {@link #createMDArray(String, int[], HDF5__Classname__StorageFeatures)} on the different
+     *            meanings of this parameter.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, int sizeX, int sizeY,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the __name__ matrix to create.
+     * @param sizeY The size of the y dimension of the __name__ matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY);
+
+    /**
+     * Creates a <code>__name__</code> matrix (array of rank 2).
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param sizeX The size of the x dimension of the __name__ matrix to create.
+     * @param sizeY The size of the y dimension of the __name__ matrix to create.
+     * @param blockSizeX The size of one block in the x dimension.
+     * @param blockSizeY The size of one block in the y dimension.
+     * @param features The storage features of the data set.
+     */
+    public void createMatrix(String objectPath, long sizeX, long sizeY,
+            int blockSizeX, int blockSizeY, HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a block of a <code>__name__</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5__Classname__StorageFeatures)} beforehand.
+     * <p>
+     * Use {@link #writeMatrixBlockWithOffset(String, __name__[][], long, long)} instead if the
+     * total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. The length defines the block size. Must not be
+     *            <code>null</code> or of length 0.
+     * @param blockNumberX The block number in the x dimension (offset: multiply with
+     *            <code>data.length</code>).
+     * @param blockNumberY The block number in the y dimension (offset: multiply with
+     *            <code>data[0].length</code>).
+     */
+    public void writeMatrixBlock(String objectPath, __name__[][] data,
+            long blockNumberX, long blockNumberY);
+
+    /**
+     * Writes out a block of a <code>__name__</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5__Classname__StorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, __name__[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, __name__[][] data,
+            long offsetX, long offsetY);
+
+    /**
+     * Writes out a block of a <code>__name__</code> matrix (array of rank 2). The data set needs to
+     * have been created by
+     * {@link #createMatrix(String, long, long, int, int, HDF5__Classname__StorageFeatures)} beforehand.
+     * <p>
+     * Use this method instead of {@link #writeMatrixBlock(String, __name__[][], long, long)} if
+     * the total size of the data set is not a multiple of the block size.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write.
+     * @param dataSizeX The (real) size of <code>data</code> along the x axis (needs to be
+     *            <code><= data.length</code>).
+     * @param dataSizeY The (real) size of <code>data</code> along the y axis (needs to be
+     *            <code><= data[0].length</code>).
+     * @param offsetX The x offset in the data set to start writing to.
+     * @param offsetY The y offset in the data set to start writing to.
+     */
+    public void writeMatrixBlockWithOffset(String objectPath, __name__[][] data,
+            int dataSizeX, int dataSizeY, long offsetX, long offsetY);
+
+    /**
+     * Writes out a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     */
+    public void writeMDArray(String objectPath, MD__Name__Array data);
+
+    /**
+     * Writes out a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param features The storage features of the data set.
+     */
+    public void writeMDArray(String objectPath, MD__Name__Array data,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions When the writer is configured to use extendable data types (see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial dimensions
+     *            and the dimensions of a chunk of the array will be <var>dimensions</var>. When the 
+     *            writer is configured to <i>enforce</i> a non-extendable data set, the initial dimensions 
+     *            equal the total dimensions and will be <var>dimensions</var>.
+     */
+    public void createMDArray(String objectPath, int[] dimensions);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the <code>__name__</code> array to create. When <i>requesting</i> 
+     *            a chunked data set (e.g. {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CHUNKED}), 
+     *            the initial size of the array will be 0 and the chunk size will be <var>dimensions</var>. 
+     *            When <i>allowing</i> a chunked data set (e.g. 
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___NO_COMPRESSION} when the writer is 
+     *            not configured to avoid extendable data types, see
+     *            {@link IHDF5WriterConfigurator#dontUseExtendableDataTypes()}), the initial size
+     *            and the chunk size of the array will be <var>dimensions</var>. When <i>enforcing</i> a 
+     *            non-extendable data set (e.g.
+     *            {@link HDF5__Classname__StorageFeatures#__CLASSNAME___CONTIGUOUS}), the initial size equals 
+     *            the total size and will be <var>dimensions</var>.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, int[] dimensions,
+            HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Creates a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param dimensions The dimensions of the array.
+     * @param blockDimensions The dimensions of one block (chunk) of the array.
+     * @param features The storage features of the data set.
+     */
+    public void createMDArray(String objectPath, long[] dimensions,
+            int[] blockDimensions, HDF5__Classname__StorageFeatures features);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param blockNumber The block number in each dimension (offset: multiply by the extent in
+     *            the corresponding dimension).
+     */
+    public void writeMDArrayBlock(String objectPath, MD__Name__Array data,
+            long[] blockNumber);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>. All columns need to have the
+     *            same length.
+     * @param offset The offset in the data set to start writing to in each dimension.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            long[] offset);
+
+    /**
+     * Writes out a block of a multi-dimensional <code>__name__</code> array.
+     * 
+     * @param objectPath The name (including path information) of the data set object in the file.
+     * @param data The data to write. Must not be <code>null</code>.
+     * @param blockDimensions The dimensions of the block to write to the data set.
+     * @param offset The offset of the block in the data set to start writing to in each dimension.
+     * @param memoryOffset The offset of the block in the <var>data</var> array.
+     */
+    public void writeMDArrayBlockWithOffset(String objectPath, MD__Name__Array data,
+            int[] blockDimensions, long[] offset, int[] memoryOffset);
+}
\ No newline at end of file
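
Since Java has no unsigned primitive types, the values handed to this interface are
reinterpreted two's-complement bit patterns; the Javadoc above delegates the conversions
to UnsignedIntUtils, whose method names are not shown in this diff. A plain-Java sketch
of the underlying idea for 8-bit values:

    // Store an unsigned 8-bit value (0..255) in a signed Java byte and read it back.
    int unsignedValue = 200;
    byte stored = (byte) unsignedValue;   // narrows to the bit pattern 0xC8, i.e. -56
    int recovered = stored & 0xFF;        // masking restores 200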
diff --git a/sourceTest/java/test/hdf5lib/TestAll.java b/sourceTest/java/test/hdf5lib/TestAll.java
new file mode 100644
index 0000000..4beefa9
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestAll.java
@@ -0,0 +1,73 @@
+package test.hdf5lib;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.BeforeClass;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.base.utilities.ResourceUtilities;
+
+ at RunWith(Suite.class)
+ at Suite.SuiteClasses( { TestH5.class, 
+        TestH5Eregister.class, 
+        TestH5Edefault.class, 
+        TestH5E.class, 
+        TestH5Fparams.class, TestH5Fbasic.class, TestH5F.class, 
+        TestH5Gbasic.class, TestH5G.class, TestH5Giterate.class,
+        TestH5Sbasic.class, TestH5S.class, 
+        TestH5Tparams.class, TestH5Tbasic.class, TestH5T.class, 
+        TestH5Dparams.class, TestH5D.class, TestH5Dplist.class,
+        TestH5Lparams.class, TestH5Lbasic.class, TestH5Lcreate.class,
+        TestH5R.class, 
+        TestH5P.class, TestH5PData.class, TestH5Pfapl.class,
+        TestH5A.class, 
+        TestH5Oparams.class, TestH5Obasic.class, TestH5Ocopy.class, TestH5Ocreate.class,
+        TestH5Z.class
+})
+
+public class TestAll {
+    
+    @BeforeClass
+    public static void setUp() {
+        InputStream resourceStream = null;
+        try
+        {
+            final File dir = new File("sourceTest/java/test/hdf5lib");
+            if (dir.isDirectory() == false)
+            {
+                dir.mkdirs();
+            }
+            final File file = new File(dir, "h5ex_g_iterate.hdf");
+            if (file.exists() == false)
+            {
+                resourceStream = ResourceUtilities.class.getResourceAsStream("/h5ex_g_iterate.hdf");
+                if (resourceStream == null)
+                {
+                    throw new IllegalArgumentException("Resource 'h5ex_g_iterate.hdf' not found.");
+                }
+              final OutputStream fileStream = new FileOutputStream(file);
+              try
+              {
+                  IOUtils.copy(resourceStream, fileStream);
+                  fileStream.close();
+              } finally
+              {
+                  IOUtils.closeQuietly(fileStream);
+              }
+            }
+        } catch (final IOException ex)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+        } finally
+        {
+            IOUtils.closeQuietly(resourceStream);
+        }
+    }
+}
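
The setUp() above extracts the h5ex_g_iterate.hdf resource from the classpath to the
test directory using commons-io. An equivalent, more compact sketch with java.nio.file
(Java 7+), shown only as an illustration of the same step, not as the upstream code:

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    static void extractTestResource() throws IOException
    {
        final Path target = Paths.get("sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf");
        Files.createDirectories(target.getParent());
        if (Files.notExists(target))
        {
            // try-with-resources closes the stream even if the copy fails.
            try (InputStream in = TestAll.class.getResourceAsStream("/h5ex_g_iterate.hdf"))
            {
                if (in == null)
                {
                    throw new IllegalArgumentException("Resource 'h5ex_g_iterate.hdf' not found.");
                }
                Files.copy(in, target);
            }
        }
    }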
diff --git a/sourceTest/java/test/hdf5lib/TestH5.java b/sourceTest/java/test/hdf5lib/TestH5.java
new file mode 100644
index 0000000..8d9649a
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5.java
@@ -0,0 +1,230 @@
+/**
+ * 
+ */
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+
+import org.junit.Test;
+
+/**
+ * @author xcao
+ * 
+ */
+ at SuppressWarnings("all")
+public class TestH5 {
+
+    /**
+     * Test method for {@link ncsa.hdf.hdf5lib.H5#J2C(int)}.
+     */
+    @Test
+    public void testJ2C() {
+        int H5F_ACC_RDONLY = 0x0000;
+        int H5F_ACC_RDWR = 0x0001;
+        int H5F_ACC_TRUNC = 0x0002;
+        int H5F_ACC_EXCL = 0x0004;
+        int H5F_ACC_DEBUG = 0x0008;
+        int H5F_ACC_CREAT = 0x0010;
+        int H5F_OBJ_FILE = 0x0001;
+        int H5F_OBJ_DATASET = 0x0002;
+        int H5F_OBJ_GROUP = 0x0004;
+        int H5F_OBJ_DATATYPE = 0x0008;
+        int H5F_OBJ_ATTR = 0x0010;
+        int H5F_OBJ_ALL = H5F_OBJ_FILE | H5F_OBJ_DATASET | H5F_OBJ_GROUP
+                | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR;
+        int H5F_OBJ_LOCAL = 0x0020;
+
+        int definedValues[] = { H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_TRUNC,
+                H5F_ACC_EXCL, H5F_ACC_DEBUG, H5F_ACC_CREAT, H5F_OBJ_FILE,
+                H5F_OBJ_DATASET, H5F_OBJ_GROUP, H5F_OBJ_DATATYPE, H5F_OBJ_ATTR,
+                H5F_OBJ_ALL, H5F_OBJ_LOCAL };
+
+        int j2cValues[] = { HDF5Constants.H5F_ACC_RDONLY,
+                HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5F_ACC_EXCL, HDF5Constants.H5F_ACC_DEBUG,
+                HDF5Constants.H5F_ACC_CREAT, HDF5Constants.H5F_OBJ_FILE,
+                HDF5Constants.H5F_OBJ_DATASET, HDF5Constants.H5F_OBJ_GROUP,
+                HDF5Constants.H5F_OBJ_DATATYPE, HDF5Constants.H5F_OBJ_ATTR,
+                HDF5Constants.H5F_OBJ_ALL, HDF5Constants.H5F_OBJ_LOCAL };
+
+        for (int i = 0; i < definedValues.length; i++) {
+            assertEquals(definedValues[i], j2cValues[i]);
+        }
+
+        assertFalse(H5F_ACC_RDONLY == HDF5Constants.H5F_ACC_RDWR);
+        assertFalse(H5F_OBJ_FILE == HDF5Constants.H5F_OBJ_GROUP);
+
+    }
+
+    /**
+     * Test method for {@link ncsa.hdf.hdf5lib.H5#H5error_off()}.
+     */
+    @Test
+    public void testH5error_off() {
+        try {
+            H5.H5error_off();
+        }
+        catch (Throwable err) {
+            fail("H5.H5error_off failed: " + err);
+        }
+    }
+
+    /**
+     * Test method for {@link ncsa.hdf.hdf5lib.H5#H5open()}.
+     */
+    @Test
+    public void testH5open() {
+        try {
+            H5.H5open();
+        }
+        catch (Throwable err) {
+            fail("H5.H5open failed: " + err);
+        }
+    }
+
+    /**
+     * Test method for {@link ncsa.hdf.hdf5lib.H5#H5garbage_collect()}.
+     */
+    @Test
+    public void testH5garbage_collect() {
+        try {
+            H5.H5garbage_collect();
+        }
+        catch (Throwable err) {
+            fail("H5.H5garbage_collect failed: " + err);
+        }
+    }
+
+    /**
+     * Test method for
+     * {@link ncsa.hdf.hdf5lib.H5#H5set_free_list_limits(int, int, int, int, int, int)}
+     * .
+     */
+    @Test
+    public void testH5set_free_list_limits() {
+        int reg_global_lim = 1;
+        int reg_list_lim = 1;
+        int arr_global_lim = 1;
+        int arr_list_lim = 1;
+        int blk_global_lim = 1;
+        int blk_list_lim = 1;
+
+        try {
+            H5.H5set_free_list_limits(reg_global_lim, reg_list_lim,
+                    arr_global_lim, arr_list_lim, blk_global_lim, blk_list_lim);
+        }
+        catch (Throwable err) {
+            fail("H5.H5set_free_list_limits failed: " + err);
+        }
+    }
+
+    /**
+     * Test method for {@link ncsa.hdf.hdf5lib.H5#H5get_libversion(int[])}.
+     */
+    @Test
+    public void testH5get_libversion() {
+        int libversion[] = { 0, 0, 0 };
+
+        try {
+            H5.H5get_libversion(libversion);
+        }
+        catch (Throwable err) {
+            fail("H5.H5get_libversion: " + err);
+        }
+
+        for (int i = 0; i < 3; i++)
+            assertEquals(libversion[i], H5.LIB_VERSION[i]);
+
+        for (int i = 0; i < 3; i++)
+            assertFalse(libversion[i] == 0);
+    }
+
+    /**
+     * Test method for
+     * {@link ncsa.hdf.hdf5lib.H5#H5check_version(int, int, int)}.
+     */
+    @Test
+    public void testH5check_version() {
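+        // 1.8.10 is presumably the native HDF5 release these bindings were
+        // built against; the call is expected to succeed for a matching
+        // (or compatible) version triple.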
+        int majnum = 1, minnum = 8, relnum = 10;
+
+        try {
+            H5.H5check_version(majnum, minnum, relnum);
+        }
+        catch (Throwable err) {
+            fail("H5.H5check_version failed: " + err);
+        }
+
+        try {
+            H5.H5check_version(-1, 0, 0);
+        }
+        catch (Throwable err) {
+            fail("H5.H5check_version failed: " + err);
+        }
+    }
+    
+    @Test
+    public void testIsSerializable() {
+        H5 test = new H5();
+        ByteArrayOutputStream out = new ByteArrayOutputStream();
+        ObjectOutputStream oos;
+        try {
+            oos = new ObjectOutputStream(out);
+            oos.writeObject(test);
+            oos.close();
+        }
+        catch (IOException err) {
+            err.printStackTrace();
+            fail("ObjectOutputStream failed: " + err);
+        }
+        assertTrue(out.toByteArray().length > 0);
+
+    }
+    
+    @SuppressWarnings("static-access")
+    @Test 
+    public void serializeToDisk()
+    {
+        try {
+            H5 test = new H5();
+
+            FileOutputStream fos = new FileOutputStream("temph5.ser");
+            ObjectOutputStream oos = new ObjectOutputStream(fos);
+            oos.writeObject(test);
+            oos.close();
+        }
+        catch (Exception ex) {
+            fail("Exception thrown during test: " + ex.toString());
+        }
+        
+        try {
+            FileInputStream fis = new FileInputStream("temph5.ser");
+            ObjectInputStream ois = new ObjectInputStream(fis);
+            H5 test = (ncsa.hdf.hdf5lib.H5) ois.readObject();
+            ois.close();
+            
+            assertTrue("H5.LIB_VERSION[0]", test.LIB_VERSION[0]==H5.LIB_VERSION[0]);
+            assertTrue("H5.LIB_VERSION[1]", test.LIB_VERSION[1]==H5.LIB_VERSION[1]);
+            assertTrue("H5.LIB_VERSION[2]", test.LIB_VERSION[2]==H5.LIB_VERSION[2]);
+            
+            // Clean up the file
+            new File("temph5.ser").delete();
+        }
+        catch (Exception ex) {
+            fail("Exception thrown during test: " + ex.toString());
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5A.java b/sourceTest/java/test/hdf5lib/TestH5A.java
new file mode 100644
index 0000000..02a0fd4
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5A.java
@@ -0,0 +1,809 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5A_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5A {
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+    int type_id = -1;
+    int space_id = -1;
+    int lapl_id = -1;
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createDataset: ", did > 0);
+
+        return did;
+    }
+
+    @Before
+    public void createH5file() throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0", H5.getOpenIDCount() == 0);
+            
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            assertTrue("TestH5D.createH5file: H5.H5Fcreate: ", H5fid > 0);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+            H5did = _createDataset(H5fid, H5dsid, "dset",
+                    HDF5Constants.H5P_DEFAULT);
+            assertTrue("TestH5D.createH5file: _createDataset: ", H5did > 0);
+            space_id = H5.H5Screate(HDF5Constants.H5S_NULL);
+            assertTrue(space_id > 0);
+            lapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+            assertTrue(lapl_id > 0);
+            type_id = H5.H5Tenum_create(HDF5Constants.H5T_STD_I32LE);
+            assertTrue(type_id > 0);
+            int status = H5.H5Tenum_insert(type_id, "test", 1);
+            assertTrue(status >= 0);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+
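+        // Flush with local scope so the objects just created are written
+        // through to the file before the test body runs.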
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+    
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5dsid > 0)
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did > 0)
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5fid > 0)
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+        _deleteFile(H5_FILE);
+
+        if (type_id > 0)
+            try {H5.H5Tclose(type_id);} catch (Exception ex) {}
+        if (space_id > 0)
+            try {H5.H5Sclose(space_id);} catch (Exception ex) {}
+        if (lapl_id > 0)
+            try {H5.H5Pclose(lapl_id);} catch (Exception ex) {}
+    }
+    
+    @Test
+    public void testH5Acreate2() {
+        int attr_id = -1;
+        try {
+            attr_id = H5.H5Acreate(H5did, "dset", type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Acreate2", attr_id >= 0);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Acreate2: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Acreate2_invalidobject() throws Throwable {
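+        // H5dsid is a dataspace identifier, not an attachable object, so
+        // attribute creation is expected to fail with HDF5LibraryException.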
+        H5.H5Acreate(H5dsid, "dset", type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Acreate2_nullname() throws Throwable {
+        H5.H5Acreate(H5did, null, type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Aopen() {
+        String attr_name = "dset";
+        int attribute_id = -1;
+        int attr_id = -1;
+
+        try {
+            attr_id = H5.H5Acreate(H5did, attr_name, type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+            // Open the existing attribute attr_name (created above by
+            // H5Acreate) on the object identifier.
+            attribute_id = H5.H5Aopen(H5did, attr_name,
+                    HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Aopen: H5Aopen", attribute_id >= 0);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aopen: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Aopen_invalidname() throws Throwable {
+        H5.H5Aopen(H5did, "attr_name", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Aopen_by_idx() {
+        int loc_id = H5did;
+        String obj_name = ".";
+        int idx_type = HDF5Constants.H5_INDEX_CRT_ORDER;
+        int order = HDF5Constants.H5_ITER_INC;
+        long n = 0;
+        int attr_id = -1;
+        int attribute_id = -1;
+        int aapl_id = HDF5Constants.H5P_DEFAULT;
+
+        try {
+            attr_id = H5.H5Acreate(H5did, "file", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+            // Open the attribute created above by index, attached to the
+            // object identifier.
+            attribute_id = H5.H5Aopen_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC,
+                    0, HDF5Constants.H5P_DEFAULT, lapl_id);
+
+            assertTrue("testH5Aopen_by_idx: H5Aopen_by_idx", attribute_id >= 0);
+
+            // Negative test: an error should be thrown when H5Aopen_by_idx
+            // is called with n=5, since five attributes have not been created.
+            try {
+                n = 5;
+                H5.H5Aopen_by_idx(loc_id, obj_name, idx_type, order, n,
+                        aapl_id, lapl_id);
+                fail("Negative Test Failed:- Error not Thrown when n is invalid.");
+            } 
+            catch (AssertionError err) {
+                fail("H5.H5Aopen_by_idx: " + err);
+            } 
+            catch (HDF5LibraryException err) {}
+
+            // Negative test: an error should be thrown when H5Aopen_by_idx
+            // is called with an object name that has not been created.
+            try {
+                n = 0;
+                obj_name = "file";
+                H5.H5Aopen_by_idx(loc_id, obj_name, idx_type, order, n,
+                        aapl_id, lapl_id);
+                fail("Negative Test Failed:- Error not Thrown when attribute name is invalid.");
+            } 
+            catch (AssertionError err) {
+                fail("H5.H5Aopen_by_idx: " + err);
+            } 
+            catch (HDF5LibraryException err) {}
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aopen_by_idx: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Acreate_by_name() {
+        String obj_name = ".";
+        String attr_name = "DATASET";
+        int attribute_id = -1;
+        boolean bool_val = false;
+
+        try {
+            attribute_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name,
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            assertTrue("testH5Acreate_by_name: H5Acreate_by_name",
+                    attribute_id >= 0);
+
+            // Check that the attribute attached to the object specified by
+            // loc_id and obj_name exists.
+            bool_val = H5.H5Aexists_by_name(H5fid, obj_name, attr_name,
+                    lapl_id);
+            assertTrue("testH5Acreate_by_name: H5Aexists_by_name", bool_val);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Acreate_by_name " + err);
+        } 
+        finally {
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Arename() throws Throwable {
+        int loc_id = H5fid;
+        String old_attr_name = "old";
+        String new_attr_name = "new";
+        int attr_id = -1;
+        int ret_val = -1;
+        boolean bool_val = false;
+
+        try {
+            attr_id = H5.H5Acreate(loc_id, old_attr_name, type_id, space_id, HDF5Constants.H5P_DEFAULT, lapl_id);
+
+            ret_val = H5.H5Arename(loc_id, old_attr_name, new_attr_name);
+
+            // Check the return value. It should be non-negative.
+            assertTrue("testH5Arename: H5Arename", ret_val >= 0);
+
+            // The attribute should exist under its new name.
+            bool_val = H5.H5Aexists(loc_id, new_attr_name);
+            assertTrue("testH5Arename: H5Aexists", bool_val);
+
+            // The attribute should no longer exist under its old name.
+            bool_val = H5.H5Aexists(loc_id, old_attr_name);
+            assertFalse("testH5Arename: H5Aexists", bool_val);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Arename " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Arename_by_name() {
+        int loc_id = H5fid;
+        String obj_name = ".";
+        String old_attr_name = "old";
+        String new_attr_name = "new";
+        int attr_id = -1;
+        int ret_val = -1;
+        boolean bool_val = false;
+
+        try {
+            attr_id = H5.H5Acreate_by_name(loc_id, obj_name, old_attr_name,
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, lapl_id);
+
+            ret_val = H5.H5Arename_by_name(loc_id, obj_name, old_attr_name,
+                    new_attr_name, lapl_id);
+
+            // Check the return value. It should be non-negative.
+            assertTrue("testH5Arename_by_name: H5Arename_by_name", ret_val >= 0);
+
+            // The attribute should exist under its new name.
+            bool_val = H5.H5Aexists_by_name(loc_id, obj_name, new_attr_name,
+                    lapl_id);
+            assertTrue("testH5Arename_by_name: H5Aexists_by_name", bool_val);
+
+            // The attribute should no longer exist under its old name.
+            bool_val = H5.H5Aexists_by_name(loc_id, obj_name, old_attr_name,
+                    lapl_id);
+            assertFalse("testH5Arename_by_name: H5Aexists_by_name", bool_val);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Arename_by_name " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Aget_name_by_idx() {
+        int loc_id = H5fid;
+        String obj_name = ".";
+        String attr_name = "DATASET1", attr2_name = "DATASET2";
+        String ret_name = null;
+        int idx_type = HDF5Constants.H5_INDEX_NAME;
+        int order = HDF5Constants.H5_ITER_INC;
+        int n = 0;
+        int attr1_id = -1;
+        int attr2_id = -1;
+
+        try {
+            attr1_id = H5.H5Acreate_by_name(loc_id, obj_name, attr_name,
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr2_id = H5.H5Acreate_by_name(loc_id, obj_name, attr2_name,
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+
+            // Get the first attribute name (n=0).
+            ret_name = H5.H5Aget_name_by_idx(loc_id, obj_name, idx_type, order,
+                    n, lapl_id);
+            assertFalse("H5Aget_name_by_idx ", ret_name == null);
+            assertEquals(ret_name, attr_name);
+
+            // Get the second attribute name (n=1).
+            ret_name = H5.H5Aget_name_by_idx(loc_id, obj_name, idx_type, order,
+                    1, lapl_id);
+            assertFalse("H5Aget_name_by_idx ", ret_name == null);
+            assertEquals(ret_name, attr2_name);
+
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aget_name_by_idx " + err);
+        } 
+        finally {
+            if (attr1_id > 0)
+                try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+            if (attr2_id > 0)
+                try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Aget_storage_size() {
+        int attr_id = -1;
+        long attr_size = -1;
+
+        try {
+            attr_id = H5.H5Acreate(H5did, "dset", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
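+            // space_id was created as H5S_NULL in setup, so the attribute
+            // holds no elements and its storage size should be 0.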
+            attr_size = H5.H5Aget_storage_size(attr_id);
+            assertTrue("The size of attribute is :", attr_size == 0);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aget_storage_size: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Aget_info() {
+        H5A_info_t attr_info = null;
+        int attribute_id = -1;
+        int attr_id = -1;
+
+        try {
+            attr_id = H5.H5Acreate(H5did, "dset", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            attribute_id = H5.H5Aopen(H5did, "dset", HDF5Constants.H5P_DEFAULT);
+            // Calling H5Aget_info with attribute_id returned from H5Aopen.
+            attr_info = H5.H5Aget_info(attribute_id);
+            assertFalse("H5Aget_info ", attr_info == null);
+            assertTrue("Corder_Valid should be false",
+                    attr_info.corder_valid == false);
+            assertTrue("Character set used for attribute name",
+                    attr_info.cset == HDF5Constants.H5T_CSET_ASCII);
+            assertTrue("Corder ", attr_info.corder == 0);
+            assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr_id));
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aget_info: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Aget_info1() {
+        H5A_info_t attr_info = null;
+        int attribute_id = -1;
+        int attr_id = -1;
+        int order = HDF5Constants.H5_ITER_INC;
+
+        try {
+            attr_id = H5.H5Acreate(H5did, ".", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            attribute_id = H5.H5Aopen_by_idx(H5did, ".",
+                    HDF5Constants.H5_INDEX_CRT_ORDER, order, 0,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            // Calling H5Aget_info with attribute_id returned from
+            // H5Aopen_by_idx.
+            attr_info = H5.H5Aget_info(attribute_id);
+
+            assertFalse("H5Aget_info ", attr_info == null);
+            assertTrue("Corder_Valid should be true",
+                    attr_info.corder_valid == true);
+            assertTrue("Character set",
+                    attr_info.cset == HDF5Constants.H5T_CSET_ASCII);
+            assertTrue("Corder ", attr_info.corder == 0);
+            assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr_id));
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aget_info1: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Aget_info_by_idx() {
+        int attr_id = -1;
+        int attr2_id = -1;
+        H5A_info_t attr_info = null;
+
+        try {
+            attr_id = H5.H5Acreate(H5did, "dset1", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            attr2_id = H5.H5Acreate(H5did, "dataset2", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            
+            //Verify info for 1st attribute, in increasing creation order
+            attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, lapl_id);
+            assertNotNull(attr_info);
+            assertTrue("Corder ", attr_info.corder == 0);//should equal 0 as this is the order of 1st attribute created.
+            assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr_id));
+
+            //Verify info for 2nd attribute, in increasing creation order
+            attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 1, lapl_id);
+            assertNotNull(attr_info);
+            assertTrue("Corder", attr_info.corder == 1);
+            assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr2_id));
+
+            //verify info for 2nd attribute, in decreasing creation order
+            attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_DEC, 0, lapl_id);
+            assertNotNull(attr_info);
+            assertTrue("Corder", attr_info.corder == 1); //should equal 1 as this is the order of 2nd attribute created.
+
+            //verify info for 1st attribute, in decreasing creation order
+            attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_DEC, 1, lapl_id);
+            assertNotNull(attr_info);
+            assertTrue("Corder", attr_info.corder == 0); //should equal 0 as this is the order of 1st attribute created.
+
+            //verify info for 1st attribute, in increasing name order
+            attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 1, lapl_id);
+            assertNotNull(attr_info);
+            assertTrue("Corder", attr_info.corder == 0); //should equal 0 as this is the order of 1st attribute created.
+
+            //verify info for 2nd attribute, in decreasing name order
+            attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_DEC, 1, lapl_id);
+            assertNotNull(attr_info);
+            assertTrue("Corder", attr_info.corder == 1); //should equal 1 as this is the order of 2nd attribute created.
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aget_info_by_idx:" + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+            if (attr2_id > 0)
+                try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Aget_info_by_name() {
+        int attr_id = -1;
+        H5A_info_t attr_info = null;
+        String obj_name = ".";
+        String attr_name = "DATASET";
+
+        try {
+            attr_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name, type_id,
+                    space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr_info = H5.H5Aget_info_by_name(H5fid, obj_name, attr_name,
+                    lapl_id);
+            assertNotNull(attr_info);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aget_info_by_name:" + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Adelete_by_name() {
+        int attr_id = -1;
+        int ret_val = -1;
+        boolean bool_val = false;
+        boolean exists = false;
+
+        try {
+            attr_id = H5.H5Acreate_by_name(H5fid, ".", "DATASET",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            ret_val = H5.H5Adelete_by_name(H5fid, ".", "DATASET", lapl_id);
+            assertTrue("H5Adelete_by_name", ret_val >= 0);
+
+            // Check if the Attribute still exists.
+            bool_val = H5.H5Aexists_by_name(H5fid, ".", "DATASET",
+                    lapl_id);
+            assertFalse("testH5Adelete_by_name: H5Aexists_by_name", bool_val);
+            exists = H5.H5Aexists(H5fid, "DATASET");
+            assertFalse("testH5Adelete_by_name: H5Aexists ",exists);
+
+            // Negative test. Error thrown when we try to delete an attribute
+            // that has already been deleted.
+            try{
+                ret_val = H5.H5Adelete_by_name(H5fid, ".", "DATASET", lapl_id);
+                fail("Negative Test Failed: Error Not thrown.");
+            } 
+            catch (AssertionError err) {
+                fail("H5.H5Adelete_by_name: " + err);
+            } 
+            catch (HDF5LibraryException err) {}
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Adelete_by_name " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Aexists() {
+        boolean exists = false;
+        int attr_id = -1;
+        int attribute_id = -1;
+
+        try {
+            exists = H5.H5Aexists(H5fid, "None");
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aexists: " + err);
+        }
+        assertFalse("H5Aexists ", exists);
+
+        try {
+            attr_id = H5.H5Acreate(H5fid, "dset", type_id, space_id,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            exists = H5.H5Aexists(H5fid, "dset");
+            assertTrue("H5Aexists ", exists);
+
+            attribute_id = H5.H5Acreate_by_name(H5fid, ".", "attribute",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            exists = H5.H5Aexists(H5fid, "attribute");
+            assertTrue("H5Aexists ", exists);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aexists: " + err);
+        } 
+        finally {
+            if (attr_id > 0)
+                try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Adelete_by_idx_order() {
+        boolean exists = false;
+        int attr1_id = -1;
+        int attr2_id = -1;
+        int attr3_id = -1;
+        int attr4_id = -1;
+        
+        try {
+            attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr4_id = H5.H5Acreate_by_name(H5fid, ".", "attribute4",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+                    
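+            // Index 3 in increasing creation order is the most recently
+            // created attribute, "attribute4".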
+            H5.H5Adelete_by_idx(H5fid, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 3, lapl_id);
+            exists = H5.H5Aexists(H5fid, "attribute4");
+            assertFalse("H5Adelete_by_idx: H5Aexists", exists);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Adelete_by_idx: " + err);
+        } 
+        finally {
+            if (attr1_id > 0)
+                try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+            if (attr2_id > 0)
+                try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+            if (attr3_id > 0)
+                try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+            if (attr4_id > 0)
+                try {H5.H5Aclose(attr4_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Adelete_by_idx_name1() {
+        boolean exists = false;
+        int attr1_id = -1;
+        int attr2_id = -1;
+        int attr3_id = -1;
+        
+        try {
+            attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);        
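+            // Index 2 in increasing name order is "attribute3", since the
+            // names sort alphabetically.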
+            H5.H5Adelete_by_idx(H5fid, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, lapl_id);
+            exists = H5.H5Aexists(H5fid, "attribute3");
+            assertFalse("H5Adelete_by_idx: H5Aexists", exists);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Adelete_by_idx: " + err);
+        } 
+        finally {
+            if (attr1_id > 0)
+                try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+            if (attr2_id > 0)
+                try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+            if (attr3_id > 0)
+                try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Adelete_by_idx_name2() {
+        boolean exists = false;
+        int attr1_id = -1;
+        int attr2_id = -1;
+        int attr3_id = -1;
+        int attr4_id = -1;
+        
+        try {
+            attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+            attr4_id = H5.H5Acreate_by_name(H5fid, ".", "attribute4",
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+                    
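+            // Index 3 in decreasing name order is the alphabetically first
+            // name, "attribute1".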
+            H5.H5Adelete_by_idx(H5fid, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_DEC, 3, lapl_id);
+            exists = H5.H5Aexists(H5fid, "attribute1");
+            assertFalse("H5Adelete_by_idx: H5Aexists", exists);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Adelete_by_idx: " + err);
+        } 
+        finally {
+            if (attr1_id > 0)
+                try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+            if (attr2_id > 0)
+                try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+            if (attr3_id > 0)
+                try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+            if (attr4_id > 0)
+                try {H5.H5Aclose(attr4_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Adelete_by_idx_null() throws Throwable {
+        H5.H5Adelete_by_idx(H5fid, null, HDF5Constants.H5_INDEX_CRT_ORDER,
+                HDF5Constants.H5_ITER_INC, 0, lapl_id);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Adelete_by_idx_invalidobject() throws Throwable {
+        H5.H5Adelete_by_idx(H5fid, "invalid", HDF5Constants.H5_INDEX_CRT_ORDER,
+                HDF5Constants.H5_ITER_INC, 0, lapl_id);
+    }
+    
+    @Test
+    public void testH5Aopen_by_name() {
+        String obj_name = ".";
+        String attr_name = "DATASET";
+        int attribute_id = -1;
+        int aid = -1;
+
+        try {
+            attribute_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name,
+                    type_id, space_id, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, lapl_id);
+
+            //open Attribute by name
+            if(attribute_id >= 0) {
+                try {
+                    aid = H5.H5Aopen_by_name(H5fid, obj_name, attr_name, HDF5Constants.H5P_DEFAULT, lapl_id);
+                    assertTrue("testH5Aopen_by_name: ", aid>=0);
+                }
+                catch(Throwable err) {
+                    err.printStackTrace();
+                    fail("H5.H5Aopen_by_name " + err);
+                }
+            }
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Aopen_by_name " + err);
+        } 
+        finally {
+            if (aid > 0)
+                try {H5.H5Aclose(aid);} catch (Exception ex) {}
+            if (attribute_id > 0)
+                try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+        }
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5D.java b/sourceTest/java/test/hdf5lib/TestH5D.java
new file mode 100644
index 0000000..d2df32f
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5D.java
@@ -0,0 +1,856 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.HDFNativeData;
+import ncsa.hdf.hdf5lib.callbacks.H5D_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5D_iterate_t;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5D {
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    private static final int RANK = 2;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5dtid = -1;
+    int H5did = -1;
+    int H5did0 = -1;
+    int H5dcpl_id = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+
+    // Values for the status of space allocation
+    enum H5D_space_status {
+        H5D_SPACE_STATUS_ERROR(-1), H5D_SPACE_STATUS_NOT_ALLOCATED(0), H5D_SPACE_STATUS_PART_ALLOCATED(
+                1), H5D_SPACE_STATUS_ALLOCATED(2);
+
+        private int code;
+
+        H5D_space_status(int space_status) {
+            this.code = space_status;
+        }
+
+        public int getCode() {
+            return this.code;
+        }
+    }
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();}
+            catch (SecurityException e) {}
+        }
+    }
+
+    private final void _createPDataset(int fid, int dsid, String name, int dcpl_val) {
+        
+        try {
+            H5dcpl_id = H5.H5Pcreate(dcpl_val);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Pcreate: " + err);
+        }
+        assertTrue("testH5D._createPDataset: H5.H5Pcreate: ", H5dcpl_id > 0);
+
+        // Set the allocation time to "early". This way we can be sure
+        // that reading from the dataset immediately after creation will
+        // return the fill value.
+        try {
+            H5.H5Pset_alloc_time(H5dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            H5did0 = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, H5dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createPDataset: ", H5did0 > 0);
+    }
+
+    private final void _createDataset(int fid, int dsid, String name, int dapl) {
+        try {
+            H5did = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createDataset: ", H5did > 0);
+    }
+
+    private final void _createVLDataset(int fid, int dsid, String name, int dapl) {
+        try {
+            H5dtid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+            H5.H5Tset_size(H5dtid, HDF5Constants.H5T_VARIABLE);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Tcopy: " + err);
+        }
+        assertTrue("TestH5D._createVLDataset: ", H5dtid > 0);
+        try {
+            H5did = H5.H5Dcreate(fid, name, H5dtid, dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createVLDataset: ", H5did > 0);
+    }
+    
+    private final void _closeH5file() throws HDF5LibraryException {
+        if (H5dcpl_id >= 0)
+            try {H5.H5Pclose(H5dcpl_id);} catch (Exception ex) {}
+        if (H5did0 >= 0)
+            try {H5.H5Dclose(H5did0);} catch (Exception ex) {}
+        if (H5did >= 0)
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5dtid > 0) 
+            try {H5.H5Tclose(H5dtid);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+    }
+
+    private final void _openH5file(String name, int dapl) {
+       try {
+           H5fid = H5.H5Fopen(H5_FILE,
+                   HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+           H5did = H5.H5Dopen(H5fid, name, dapl);
+           H5dsid = H5.H5Dget_space(H5did);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("TestH5D._openH5file: " + err);
+       }
+       assertTrue("TestH5D._openH5file: H5.H5Fopen: ",H5fid > 0);
+       assertTrue("TestH5D._openH5file: H5.H5Screate_simple: ",H5dsid > 0);
+    }
+
+    @Before
+    public void createH5file()
+            throws NullPointerException, HDF5Exception {
+       assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(RANK, H5dims, null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5dcpl_id >= 0)
+            try {H5.H5Pclose(H5dcpl_id);} catch (Exception ex) {}
+        if (H5did0 >= 0)
+            try {H5.H5Dclose(H5did0);} catch (Exception ex) {}
+        if (H5did >= 0)
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5dtid > 0) 
+            try {H5.H5Tclose(H5dtid);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+        _deleteFile(H5_FILE);
+    }
+
+    @Test
+    public void testH5Dcreate() {
+        int dataset_id = -1;
+        try {
+            dataset_id = H5.H5Dcreate(H5fid, "dset",
+                HDF5Constants.H5T_STD_I32BE, H5dsid,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dcreate: " + err);
+        }
+        assertTrue(dataset_id > 0);
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+    }
+
+    @Test
+    public void testH5Dcreate_anon() {
+        int dataset_id = -1;
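+        // H5Dcreate_anon creates a dataset without linking it into the file
+        // hierarchy, so it is reachable only through the returned identifier.
+        // A sketch, assuming the binding exposes H5Olink, to keep it:
+        //   H5.H5Olink(dataset_id, H5fid, "anon_dset",
+        //           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);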
+        try {
+            dataset_id = H5.H5Dcreate_anon(H5fid, HDF5Constants.H5T_STD_I32BE, 
+                    H5dsid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dcreate_anon: " + err);
+        }
+        assertTrue(dataset_id > 0);
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+    }
+
+    @Test
+    public void testH5Dopen() {
+        int dataset_id = -1;
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            H5.H5Dclose(H5did);
+            H5did = -1;
+            dataset_id = H5.H5Dopen(H5fid, "dset", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dopen: " + err);
+        }
+        assertTrue("testH5Dopen: ", dataset_id > 0);
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+    }
+
+    @Test
+    public void testH5Dget_storage_size_empty() {
+        long storage_size = 0;
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            storage_size = H5.H5Dget_storage_size(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dget_storage_size: " + err);
+        }
+        assertTrue("testH5Dget_storage_size: ", storage_size == 0);
+    }
+
+    @Test
+    public void testH5Dget_storage_size() {
+        long storage_size = 0;
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+        int FILLVAL = 99;
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+        // Initialize the dataset.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = FILLVAL;
+ 
+        try {
+            if (H5did >= 0)
+                H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+                        HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data[0]);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        
+        try {
+            storage_size = H5.H5Dget_storage_size(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dget_storage_size: " + err);
+        }
+        assertTrue("testH5Dget_storage_size: "+storage_size, storage_size == DIM_X*DIM_Y*4);
+    }
+
+    @Test
+    public void testH5Dget_access_plist() {
+        int dapl_id = -1;
+        int pequal = -1;
+        int test_dapl_id = -1;
+        
+        try {
+            test_dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dget_access_plist: H5.H5Pcreate: " + err);
+        }
+        assertTrue("testH5Dget_access_plist: test_dapl_id: ", test_dapl_id > 0);
+       
+        _createDataset(H5fid, H5dsid, "dset", test_dapl_id);
+        
+        try {
+            dapl_id = H5.H5Dget_access_plist(H5did);
+            assertTrue("testH5Dget_access_plist: dapl_id: ", dapl_id > 0);
+            pequal = H5.H5Pequal(dapl_id, test_dapl_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Dget_access_plist: H5.H5Dget_access_plist: " + err);
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dapl_id >= 0)
+                H5.H5Pclose(dapl_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+        try {
+            if (test_dapl_id >= 0)
+                H5.H5Pclose(test_dapl_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+        assertTrue("testH5Dget_access_plist: ", pequal > 0);
+    }
+    
+    @Test
+    public void testH5Dget_space_status() {
+        int[][] write_dset_data = new int[DIM_X][DIM_Y];
+        int[] space_status = new int[1];
+        int[] space_status0 = new int[1];
+
+        // Initialize the dataset.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+        _createPDataset(H5fid, H5dsid, "dset0", HDF5Constants.H5P_DATASET_CREATE);
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+        // Retrieve and print space status and storage size for dset0.
+        try {
+            H5.H5Dget_space_status(H5did0, space_status0);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertTrue("testH5Dget_space_status0 - H5.H5Dget_space_status: ", space_status0[0] == H5D_space_status.H5D_SPACE_STATUS_ALLOCATED.getCode());
+
+        // Retrieve and print space status and storage size for dset.
+        try {
+            H5.H5Dget_space_status(H5did, space_status);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertFalse("testH5Dget_space_status - H5.H5Dget_space_status: ", space_status[0] == H5D_space_status.H5D_SPACE_STATUS_ALLOCATED.getCode());
+
+        // Write the data to the dataset.
+        try {
+            H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+                        HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, write_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve and print space status and storage size for dset.
+        try {
+            H5.H5Dget_space_status(H5did, space_status);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertTrue("testH5Dget_space_status - H5.H5Dget_space_status: ", space_status[0] == H5D_space_status.H5D_SPACE_STATUS_ALLOCATED.getCode());
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_space_closed() throws Throwable {
+        int dataset_id = -1;
+        try {
+            dataset_id = H5.H5Dcreate(H5fid, "dset",
+                        HDF5Constants.H5T_STD_I32BE, H5dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D.testH5Dget_space_closed: ", dataset_id > 0);
+        H5.H5Dclose(dataset_id);
+        
+        H5.H5Dget_space(dataset_id);
+    }
+
+    @Test
+    public void testH5Dget_space() {
+        int dataspace_id = -1;
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            dataspace_id = H5.H5Dget_space(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Dget_space: " + err);
+        }
+        assertTrue("TestH5D.testH5Dget_space: ", dataspace_id > 0);
+
+        // End access to the dataspace and release resources used by it.
+        try {
+            if (dataspace_id >= 0)
+                H5.H5Sclose(dataspace_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_type_closed() throws Throwable {
+        int dataset_id = -1;
+        try {
+            dataset_id = H5.H5Dcreate(H5fid, "dset",
+                        HDF5Constants.H5T_STD_I32BE, H5dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D.testH5Dget_type_closed: ", dataset_id > 0);
+        H5.H5Dclose(dataset_id);
+        
+        H5.H5Dget_type(dataset_id);
+    }
+
+    @Test
+    public void testH5Dget_type() {
+        int datatype_id = -1;
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            datatype_id = H5.H5Dget_type(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Dget_type: " + err);
+        }
+        assertTrue("TestH5D.testH5Dget_type: ", datatype_id > 0);
+
+        // End access to the datatype and release resources used by it.
+        try {
+            if (datatype_id >= 0)
+                H5.H5Tclose(datatype_id);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+    }
+
+    @Test
+    public void testH5Dget_offset() {
+        int[][] write_dset_data = new int[DIM_X][DIM_Y];
+        long dset_address = 0;
+        _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            // Test dataset address.  Should be undefined.
+            dset_address = H5.H5Dget_offset(H5did);
+        }
+        catch (HDF5LibraryException hdfex) {
+            // Expected: the offset is undefined before any data is written.
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Dget_offset: " + err);
+        }
+        // Write the data to the dataset.
+        try {
+            H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+                        HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, write_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        
+        try {
+            // Test dataset address.
+            dset_address = H5.H5Dget_offset(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Dget_offset: " + err);
+        }
+        
+        assertTrue("TestH5D.testH5Dget_offset: ", dset_address > 0);
+    }
+
+    @Test
+    public void testH5Dfill_null() {
+        int[] buf_data = new int[DIM_X*DIM_Y];
+        
+        // Initialize memory buffer
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++) {
+                buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+            }
+        byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+        
+        // Fill the selection in memory; a null fill value writes zeros.
+        try {
+            H5.H5Dfill(null, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Dfill: " + err);
+        }
+        buf_data = HDFNativeData.byteToInt(buf_array);
+
+        // Verify memory buffer the hard way
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                assertTrue("H5.H5Dfill: [" + indx+","+jndx + "] ", buf_data[(indx * DIM_Y) + jndx] == 0);
+    }
+
+    @Test
+    public void testH5Dfill() {
+        int[] buf_data = new int[DIM_X*DIM_Y];
+        byte[] fill_value = HDFNativeData.intToByte(254);
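+        // Assumption: HDFNativeData.intToByte packs the int in native byte
+        // order, the layout H5Dfill expects for its fill-value buffer.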
+        
+        // Initialize memory buffer
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++) {
+                buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+            }
+        byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+        
+        // Fill selection in memory
+        try {
+            H5.H5Dfill(fill_value, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Dfill: " + err);
+        }
+        buf_data = HDFNativeData.byteToInt(buf_array);
+
+        // Verify memory buffer the hard way
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                assertTrue("H5.H5Dfill: [" + indx+","+jndx + "] ", buf_data[(indx * DIM_Y) + jndx] == 254);
+    }
+
+    @Test
+    public void testH5Diterate() {
+        final int SPACE_RANK = 2;
+        final int SPACE_FILL = 254;
+        
+        class H5D_iter_data implements H5D_iterate_t {
+            public int fill_value;             /* The fill value to check */
+            public long fill_curr_coord;          /* Current coordinate to examine */
+            public long[] fill_coords;            /* Pointer to selection's coordinates */
+        }
+        
+        H5D_iterate_t iter_data = new H5D_iter_data();
+
+        class H5D_iter_callback implements H5D_iterate_cb {
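+            // Iterate contract assumed here: H5Diterate calls back once per
+            // selected element; returning 0 continues the walk, while a
+            // negative value aborts it and surfaces as a failed iterate call.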
+            public int callback(byte[] elem_buf, int elem_id, int ndim, long[] point, H5D_iterate_t op_data) {
+                //Check value in current buffer location
+                int element = HDFNativeData.byteToInt(elem_buf, 0);
+                if(element != ((H5D_iter_data)op_data).fill_value)
+                    return -1;
+                //Check number of dimensions
+                if(ndim != SPACE_RANK)
+                    return(-1);
+                //Check Coordinates
+                long[] fill_coords = new long[2];
+                fill_coords[0] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord)];
+                fill_coords[1] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord) + 1];
+                ((H5D_iter_data)op_data).fill_curr_coord++;
+                if(fill_coords[0] != point[0])
+                    return(-1);
+                if(fill_coords[1] != point[1])
+                    return(-1);
+                
+                return(0);
+            }
+        }
+        
+        int[] buf_data = new int[DIM_X*DIM_Y];
+        byte[] fill_value = HDFNativeData.intToByte(SPACE_FILL);
+        
+        // Initialize memory buffer
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++) {
+                buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+            }
+        byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+        
+        // Fill selection in memory
+        try {
+            H5.H5Dfill(fill_value, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Diterate: " + err);
+        }
+
+        // Initialize the iterator structure
+        ((H5D_iter_data)iter_data).fill_value = SPACE_FILL;
+        ((H5D_iter_data)iter_data).fill_curr_coord = 0;
+        // Set the coordinates of the selection
+        ((H5D_iter_data)iter_data).fill_coords = new long[DIM_X*DIM_Y*SPACE_RANK];   /* Coordinates of selection */
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++) {
+                ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx)] = indx;
+                ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx) + 1] = jndx;
+            } /* end for */
+
+        // Iterate through selection, verifying correct data
+        H5D_iterate_cb iter_cb = new H5D_iter_callback();
+        int op_status = -1;
+        try {
+            op_status = H5.H5Diterate(buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Diterate: " + err);
+        }
+        assertTrue("H5Diterate ", op_status == 0);
+    }
+
+    @Test
+    public void testH5Diterate_write() {
+        final int SPACE_RANK = 2;
+        final int SPACE_FILL = 254;
+        
+        class H5D_iter_data implements H5D_iterate_t {
+            public int fill_value;             /* The fill value to check */
+            public long fill_curr_coord;          /* Current coordinate to examine */
+            public long[] fill_coords;            /* Pointer to selection's coordinates */
+        }
+        
+        H5D_iterate_t iter_data = new H5D_iter_data();
+
+        class H5D_iter_callback implements H5D_iterate_cb {
+            public int callback(byte[] elem_buf, int elem_id, int ndim, long[] point, H5D_iterate_t op_data) {
+                //Check value in current buffer location
+                int element = HDFNativeData.byteToInt(elem_buf, 0);
+                if(element != ((H5D_iter_data)op_data).fill_value)
+                    return -1;
+                //Check number of dimensions
+                if(ndim != SPACE_RANK)
+                    return(-1);
+                //Check Coordinates
+                long[] fill_coords = new long[2];
+                fill_coords[0] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord)];
+                fill_coords[1] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord) + 1];
+                ((H5D_iter_data)op_data).fill_curr_coord++;
+                if(fill_coords[0] != point[0])
+                    return(-1);
+                if(fill_coords[1] != point[1])
+                    return(-1);
+                element -= 128;
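+                // Write the decremented value back through elem_buf; the iteration buffer picks up the change, as the final assertions verify.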
+                byte[] new_elembuf = HDFNativeData.intToByte(element);
+                elem_buf[0] = new_elembuf[0];
+                elem_buf[1] = new_elembuf[1];
+                elem_buf[2] = new_elembuf[2];
+                elem_buf[3] = new_elembuf[3];
+                return(0);
+            }
+        }
+        
+        int[] buf_data = new int[DIM_X*DIM_Y];
+        byte[] fill_value = HDFNativeData.intToByte(SPACE_FILL);
+        
+        // Initialize memory buffer
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++) {
+                buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+            }
+        byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+        
+        // Fill selection in memory
+        try {
+            H5.H5Dfill(fill_value, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Diterate: " + err);
+        }
+
+        // Initialize the iterator structure
+        ((H5D_iter_data)iter_data).fill_value = SPACE_FILL;
+        ((H5D_iter_data)iter_data).fill_curr_coord = 0;
+        // Set the coordinates of the selection
+        ((H5D_iter_data)iter_data).fill_coords = new long[DIM_X*DIM_Y*SPACE_RANK];   /* Coordinates of selection */
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++) {
+                ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx)] = indx;
+                ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx) + 1] = jndx;
+            } /* end for */
+
+        // Iterate through selection, verifying correct data
+        H5D_iterate_cb iter_cb = new H5D_iter_callback();
+        int op_status = -1;
+        try {
+            op_status = H5.H5Diterate(buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Diterate: " + err);
+        }
+        assertTrue("H5Diterate ", op_status == 0);
+        
+        buf_data = HDFNativeData.byteToInt(buf_array);
+
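+        // The callback decremented every 254 fill value by 128, so each element should now read 126.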
+        // Verify memory buffer the hard way
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                assertTrue("H5.H5Diterate: [" + indx+","+jndx + "] "+buf_data[(indx * DIM_Y) + jndx], buf_data[(indx * DIM_Y) + jndx] == 126);
+    }
+
+    @Test
+    public void testH5Dvlen_get_buf_size() {
+        String[] str_data = { "Parting", "is such", "sweet", "sorrow.",
+                "Testing", "one", "two", "three.",
+                "Dog,", "man's", "best", "friend.",
+                "Diamonds", "are", "a", "girls!",
+                "S A", "T U R", "D A Y", "night",
+                "That's", "all", "folks", "!!!" };
+        int[] size = new int[2];
+        long str_data_bytes = 0;
+        for (int idx = 0; idx < str_data.length; idx++)
+            str_data_bytes += str_data[idx].length() + 1;  //Account for terminating null
+        
+        _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            if ((H5did >= 0) && (H5dtid >= 0))
+                H5.H5DwriteString(H5did, H5dtid,
+                        HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, str_data);
+
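+            // Close and reopen the file so the strings just written are flushed to disk before the buffer size is measured.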
+            _closeH5file();
+            _openH5file("dset", HDF5Constants.H5P_DEFAULT);
+            H5dtid = H5.H5Dget_type(H5did);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            H5.H5Dvlen_get_buf_size(H5did, H5dtid, H5dsid, size);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertTrue("H5Dvlen_get_buf_size "+ size[0] + " == " + str_data_bytes, size[0] == str_data_bytes);
+    }
+
+    @Test
+    public void testH5Dvlen_get_buf_size_long() {
+        String[] str_data = { "Parting", "is such", "sweet", "sorrow.",
+                "Testing", "one", "two", "three.",
+                "Dog,", "man's", "best", "friend.",
+                "Diamonds", "are", "a", "girls!",
+                "S A", "T U R", "D A Y", "night",
+                "That's", "all", "folks", "!!!" };
+        long vl_size = -1;  /* Number of bytes used */
+        long str_data_bytes = 0;
+        for (int idx = 0; idx < str_data.length; idx++)
+            str_data_bytes += str_data[idx].length() + 1;  //Account for terminating null
+        
+        _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            if ((H5did >= 0) && (H5dtid >= 0))
+                H5.H5DwriteString(H5did, H5dtid,
+                        HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, str_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            vl_size = H5.H5Dvlen_get_buf_size_long(H5did, H5dtid, H5dsid);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertTrue("H5Dvlen_get_buf_size_long " + vl_size + " == " + str_data_bytes, vl_size == str_data_bytes);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Dvlen_read_invalid_buffer() throws Throwable {
+        String[] str_data = { "Parting", "is such", "sweet", "sorrow.",
+                "Testing", "one", "two", "three.",
+                "Dog,", "man's", "best", "friend.",
+                "Diamonds", "are", "a", "girls!",
+                "S A", "T U R", "D A Y", "night",
+                "That's", "all", "folks", "!!!" };
+        byte[] read_data = new byte[512];
+        
+        _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        
+        try {
+            H5.H5DwriteString(H5did, H5dtid,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, str_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        H5.H5Dread(H5did, H5dtid, 
+                     HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, 
+                     HDF5Constants.H5P_DEFAULT, read_data);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Dparams.java b/sourceTest/java/test/hdf5lib/TestH5Dparams.java
new file mode 100644
index 0000000..c56b526
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Dparams.java
@@ -0,0 +1,116 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Dparams {
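+    // Parameter-validation tests: each call below passes an invalid id or a null argument and must throw the expected exception.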
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Dcreate_null() throws Throwable {
+        H5.H5Dcreate(-1, null, 0, 0, 0, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dcreate_invalid() throws Throwable {
+        H5.H5Dcreate(-1, "Bogus", -1, -1, -1, -1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dcreate_anon_invalid() throws Throwable {
+        H5.H5Dcreate_anon(-1, -1, -1, -1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_access_plist_invalid() throws Throwable {
+        H5.H5Dget_access_plist(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_create_plist_invalid() throws Throwable {
+        H5.H5Dget_create_plist(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_offset_invalid() throws Throwable {
+        H5.H5Dget_offset(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_space_invalid() throws Throwable {
+        H5.H5Dget_space(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_type_invalid() throws Throwable {
+        H5.H5Dget_type(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dget_space_status_invalid() throws Throwable {
+        int[] status = new int[2];
+        H5.H5Dget_space_status(-1, status);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Dget_space_status_null() throws Throwable {
+        H5.H5Dget_space_status(-1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dset_extent_status_invalid() throws Throwable {
+        long[] size = new long[2];
+        H5.H5Dset_extent(-1, size);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Dset_extent_status_null() throws Throwable {
+        H5.H5Dset_extent(-1, null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Dopen_null() throws Throwable {
+        H5.H5Dopen(-1, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dopen_invalid() throws Throwable {
+        H5.H5Dopen(-1, "Bogus", 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dvlen_get_buf_size_invalid() throws Throwable {
+        int[] size = new int[2];
+        H5.H5Dvlen_get_buf_size(-1, -1, -1, size);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Dvlen_get_buf_size_null() throws Throwable {
+        H5.H5Dvlen_get_buf_size(-1, -1, -1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Dvlen_reclaim_invalid() throws Throwable {
+        byte[] buf = new byte[2];
+        H5.H5Dvlen_reclaim(-1, -1, -1, buf);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Dvlen_reclaim_null() throws Throwable {
+        H5.H5Dvlen_reclaim(-1, -1, -1, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Dget_storage_size_invalid() throws Throwable {
+        H5.H5Dget_storage_size(-1);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Dplist.java b/sourceTest/java/test/hdf5lib/TestH5Dplist.java
new file mode 100644
index 0000000..266ba19
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Dplist.java
@@ -0,0 +1,196 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Dplist {
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 7;
+    private static final int EDIM_X = 6;
+    private static final int EDIM_Y = 10;
+    private static final int CHUNK_X = 4;
+    private static final int CHUNK_Y = 4;
+    private static final int NDIMS = 2;
+    private static final int FILLVAL = 99;
+    private static final int RANK = 2;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did = -1;
+    int H5dcpl_id = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+    long[] H5extdims = { EDIM_X, EDIM_Y };
+    long[] H5chunk_dims = { CHUNK_X, CHUNK_Y };
+    long[] H5maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    private final void _createPDataset(int fid, int dsid, String name, int dcpl_val) {
+        try {
+            H5dcpl_id = H5.H5Pcreate(dcpl_val);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("H5.H5Pcreate: " + err);
+        }
+        assertTrue("TestH5Dplist._createPDataset: ", H5dcpl_id > 0);
+
+        // Set the chunk size.
+        try {
+            H5.H5Pset_chunk(H5dcpl_id, NDIMS, H5chunk_dims);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the fill value for the dataset
+        try {
+            int[] fill_value = { FILLVAL };
+            H5.H5Pset_fill_value(H5dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the allocation time to "early". This way we can be sure
+        // that reading from the dataset immediately after creation will
+        // return the fill value.
+        try {
+            H5.H5Pset_alloc_time(H5dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        _createDataset(fid, dsid, name, H5dcpl_id, HDF5Constants.H5P_DEFAULT);
+    }
+
+    private final void _createDataset(int fid, int dsid, String name, int dcpl, int dapl) {
+        try {
+            H5did = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, dcpl, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5Dplist._createDataset: ",H5did > 0);
+    }
+
+    @Before
+    public void createH5file() throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(RANK, H5dims, H5maxdims);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5Dplist.createH5file: " + err);
+        }
+        assertTrue("TestH5Dplist.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5Dplist.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5dcpl_id >= 0)
+            try {H5.H5Pclose(H5dcpl_id);} catch (Exception ex) {}
+        if (H5did > 0) 
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+        _deleteFile(H5_FILE);
+    }
+
+    @Test
+    public void testH5Dset_extent() {
+        int[][] write_dset_data = new int[DIM_X][DIM_Y];
+        int[][] read_dset_data = new int[DIM_X][DIM_Y];
+        int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+        // Initialize the dataset.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+        _createPDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DATASET_CREATE);
+
+        // Read values from the dataset, which has not been written to yet.
+        try {
+            H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, read_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertTrue("testH5Dset_extent - H5.H5Dread: ", read_dset_data[0][0] == 99);
+
+        // Write the data to the dataset.
+        try {
+            H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, write_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read the data back.
+        try {
+            H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, read_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        assertTrue("testH5Dset_extent - H5.H5Dread: ", read_dset_data[3][6] == 12);
+
+        // Extend the dataset.
+        try {
+            H5.H5Dset_extent(H5did, H5extdims);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read from the extended dataset.
+        try {
+            H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, extend_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
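+        // Data written before the extension is preserved; elements in the newly allocated region read back as the fill value (99).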
+      assertTrue("testH5Dset_extent - H5.H5Dread: ", extend_dset_data[3][6] == 12);
+      assertTrue("testH5Dset_extent - H5.H5Dread: ", extend_dset_data[4][8] == 99);
+  }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5E.java b/sourceTest/java/test/hdf5lib/TestH5E.java
new file mode 100644
index 0000000..ab73fa8
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5E.java
@@ -0,0 +1,394 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestH5E {
+    int hdf_java_classid = -1;
+    int current_stackid = -1;
+
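+    // Each test runs against a freshly registered "HDF-Java-Error" class and a saved copy of the current error stack.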
+    @Before
+    public void H5Eget_stack_class() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        hdf_java_classid = -1;
+        try {
+            hdf_java_classid = H5.H5Eregister_class("HDF-Java-Error",
+                    "hdf-java", "2.5");
+            current_stackid = H5.H5Eget_current_stack();
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_stack_class: " + err);
+        }
+    }
+
+    @After
+    public void H5Erestore_stack_class() {
+        try {
+            H5.H5Eunregister_class(hdf_java_classid);
+            hdf_java_classid = -1;
+            H5.H5Eclose_stack(current_stackid);
+            current_stackid = -1;
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Erestore_stack_class: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eget_class_name() {
+        try {
+            String class_name = H5.H5Eget_class_name(hdf_java_classid);
+            assertNotNull("H5.H5Eget_class_name: " + class_name, class_name);
+            assertEquals("H5.H5Eget_class_name: ", "HDF-Java-Error", class_name);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_class_name: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eprint2() {
+        try {
+            assertFalse(current_stackid < 0);
+            H5.H5Eprint2(current_stackid, null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eprint2: " + err);
+        }
+    }
+
+    @Ignore("Tested with create_msg_major[minor]")
+    public void testH5Eclose_msg() {
+        fail("Not yet implemented");
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ecreate_msg_name_null() throws Throwable {
+        H5.H5Ecreate_msg(hdf_java_classid, HDF5Constants.H5E_MAJOR, null);
+    }
+
+    @Test
+    public void testH5Ecreate_msg_major() {
+        try {
+            int err_id = H5.H5Ecreate_msg(hdf_java_classid,
+                    HDF5Constants.H5E_MAJOR, "Error in Test");
+            assertFalse("H5.H5Ecreate_msg_major: " + err_id, err_id < 0);
+            H5.H5Eclose_msg(err_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ecreate_msg_major: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Ecreate_msg_minor() {
+        try {
+            int err_id = H5.H5Ecreate_msg(hdf_java_classid,
+                    HDF5Constants.H5E_MINOR, "Error in Test Function");
+            assertFalse("H5.H5Ecreate_msg_minor: " + err_id, err_id < 0);
+            H5.H5Eclose_msg(err_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ecreate_msg_minor: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eget_msg() {
+        int[] error_msg_type = { HDF5Constants.H5E_MINOR };
+        int err_id = -1;
+        String msg = null;
+        try {
+            err_id = H5.H5Ecreate_msg(hdf_java_classid,
+                    HDF5Constants.H5E_MAJOR, "Error in Test");
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_msg: " + err);
+        }
+        assertFalse("H5.H5Eget_msg: H5Ecreate_msg - " + err_id, err_id < 0);
+        try {
+            msg = H5.H5Eget_msg(err_id, error_msg_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_msg: " + err);
+        }
+        assertNotNull("H5.H5Eget_msg: " + msg, msg);
+        assertEquals("H5.H5Eget_msg: ", "Error in Test", msg);
+        assertEquals("H5.H5Eget_msg: ", HDF5Constants.H5E_MAJOR,
+                    error_msg_type[0]);
+        try {
+            H5.H5Eclose_msg(err_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_msg: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eget_msg_major() {
+
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (HDF5LibraryException hdferr) {
+            int[] error_msg_type = { HDF5Constants.H5E_MAJOR };
+            String msg = null;
+            try {
+                msg = H5.H5Eget_msg(hdferr.getMajorErrorNumber(),
+                        error_msg_type);
+            }
+            catch (Throwable err) {
+                err.printStackTrace();
+                fail("H5.H5Eget_msg: " + err);
+            }
+            assertNotNull("H5.H5Eget_msg: " + msg, msg);
+            assertEquals("H5.H5Eget_msg: ", "Invalid arguments to routine",
+                        msg);
+            assertEquals("H5.H5Eget_msg: ", HDF5Constants.H5E_MAJOR,
+                        error_msg_type[0]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_msg: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eget_msg_minor() {
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (HDF5LibraryException hdferr) {
+            int[] error_msg_type = { HDF5Constants.H5E_MINOR };
+            String msg = null;
+            try {
+                msg = H5.H5Eget_msg(hdferr.getMinorErrorNumber(),
+                        error_msg_type);
+            }
+            catch (Throwable err) {
+                err.printStackTrace();
+                fail("H5.H5Eget_msg: " + err);
+            }
+            assertNotNull("H5.H5Eget_msg: " + msg, msg);
+            assertEquals("H5.H5Eget_msg: ", "Inappropriate type", msg);
+            assertEquals("H5.H5Eget_msg: ", HDF5Constants.H5E_MINOR,
+                        error_msg_type[0]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_msg: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Ecreate_stack() {
+        int stk_id = -1;
+        try {
+            stk_id = H5.H5Ecreate_stack();
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ecreate_stack: " + err);
+        }
+        assertFalse("H5.H5Ecreate_stack: " + stk_id, stk_id < 0);
+        try {
+            H5.H5Eclose_stack(stk_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ecreate_stack: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Epop() {
+        try {
+            H5.H5Eset_current_stack(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+
+        // save current stack contents
+        try {
+            current_stackid = H5.H5Eget_current_stack();
+        }
+        catch (HDF5LibraryException err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        long num_msg = -1;
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        assertTrue("H5.H5Epop #:" + num_msg, num_msg == 0);
+
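+        // The two error records produced by the failed H5Fopen now live on the saved stack copy, not on the default stack.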
+        try {
+            num_msg = H5.H5Eget_num(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        assertTrue("H5.H5Epop #:" + num_msg, num_msg == 2);
+
+        try {
+            H5.H5Epop(current_stackid, 1);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        try {
+            num_msg = H5.H5Eget_num(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        assertTrue("H5.H5Epop", num_msg == 1);
+    }
+
+    @Test
+    public void testH5EprintInt() {
+        assertFalse(current_stackid < 0);
+        try {
+            H5.H5Eprint2(current_stackid, null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5EprintInt: " + err);
+        }
+    }
+
+    @Test
+    public void testH5EclearInt() {
+        try {
+            H5.H5Eclear(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5EclearInt: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eclear2() {
+        try {
+            H5.H5Eclear2(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eclear2: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eauto_is_v2() {
+        boolean is_v2 = false;
+        try {
+            is_v2 = H5.H5Eauto_is_v2(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eauto_is_v2: " + err);
+        }
+        assertTrue("H5.H5Eauto_is_v2: ", is_v2);
+    }
+
+    @Test
+    public void testH5Eget_num() {
+        long num_msg = -1;
+        try {
+            num_msg = H5.H5Eget_num(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_num: " + err);
+        }
+        assertTrue("H5.H5Eget_num", num_msg == 0);
+    }
+
+    @Test
+    public void testH5Eget_num_with_msg() {
+        try {
+            H5.H5Eset_current_stack(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+
+        // save current stack contents
+        try {
+            current_stackid = H5.H5Eget_current_stack();
+        }
+        catch (HDF5LibraryException err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+
+        long num_msg = -1;
+        try {
+            num_msg = H5.H5Eget_num(current_stackid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+        assertTrue("H5.H5Eget_num_with_msg #:" + num_msg, num_msg > 0);
+    }
+
+    @Ignore("API1.6")
+    public void testH5Eprint() {
+        fail("Not yet implemented");
+    }
+
+    @Ignore("API1.6")
+    public void testH5Eclear() {
+        fail("Not yet implemented");
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Edefault.java b/sourceTest/java/test/hdf5lib/TestH5Edefault.java
new file mode 100644
index 0000000..0b95e8b
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Edefault.java
@@ -0,0 +1,530 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Edefault {
+
+    @Before
+    public void H5Eset_default_stack() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            // Clear any active stack messages
+            H5.H5Eclear2(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (HDF5LibraryException err) {
+            err.printStackTrace();
+            fail("H5Eset_default_stack: " + err);
+        }
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eprint2_invalid_classid() throws Throwable {
+        H5.H5Eprint2(-1, null);
+    }
+
+    @Test
+    public void testH5Eprint() {
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+        try {
+            H5.H5Eprint2(HDF5Constants.H5E_DEFAULT, null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eprint: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eget_current_stack() {
+        long num_msg = -1;
+        long num_msg_default = -1;
+        int stack_id = -1;
+        int stack_id_default = HDF5Constants.H5E_DEFAULT;
+        try {
+            H5.H5Fopen("test", 0, 1); 
+        }
+        catch (Throwable err) {
+            //default stack id will be different after exception 
+            stack_id_default = HDF5Constants.H5E_DEFAULT;
+            //err.printStackTrace(); //This will clear the error stack
+        }
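+        // The failed H5Fopen above leaves two error records on the default stack.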
+        // Verify we have the correct number of messages
+        try {
+            num_msg_default = H5.H5Eget_num(stack_id_default);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+                num_msg_default == 2);
+
+        // Save a copy of the current stack; H5Eget_current_stack also clears the default stack
+        try {
+            stack_id = H5.H5Eget_current_stack();
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+                + stack_id, stack_id < 0);
+        assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+                + stack_id, stack_id == stack_id_default);
+
+        // Verify we have the correct number of messages
+        try {
+            num_msg_default = H5.H5Eget_num(stack_id_default);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+                num_msg_default == 0);
+
+        //Verify the copy has the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg,
+                num_msg == 2);
+       
+        try {
+            H5.H5Eclose_stack(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eget_current_stack_pop() {
+        long num_msg = -1;
+        long num_msg_default = -1;
+        int stack_id = -1;
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+            //err.printStackTrace(); //This will clear the error stack
+        }
+
+        // Verify we have the correct number of messages
+        try {
+            num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+                num_msg_default == 2);
+
+        // Save a copy of the current stack; H5Eget_current_stack also clears the default stack
+        try {
+            stack_id = H5.H5Eget_current_stack();
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+                + stack_id, stack_id < 0);
+        assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+                + stack_id, stack_id == HDF5Constants.H5E_DEFAULT);
+
+        // Verify we have the correct number of messages
+        try {
+            num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+                num_msg_default == 0);
+
+        //Verify the copy has the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg,
+                num_msg == 2);
+
+        //Generate errors on default stack
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+            //err.printStackTrace(); //This will clear the error stack
+        }
+
+        // Verify we have the correct number of messages
+        try {
+            num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+                num_msg_default == 2);
+
+        //Remove one message from the current stack
+        try {
+            H5.H5Epop(HDF5Constants.H5E_DEFAULT, 1);
+            num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: pop #:" + num_msg_default,
+                num_msg_default == 1);
+
+        //Verify the copy still has the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg,
+                num_msg == 2);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eclose_stack_invalid_stackid() throws Throwable {
+        H5.H5Eclose_stack(-1);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eget_class_name_invalid_classid() throws Throwable {
+        H5.H5Eget_class_name(-1);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eget_class_name_invalid_classname() throws Throwable {
+        H5.H5Eget_class_name(HDF5Constants.H5E_DEFAULT);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eclose_msg_invalid_errid() throws Throwable {
+        H5.H5Eclose_msg(-1);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Ecreate_msg_invalid_errid() throws Throwable {
+        H5.H5Ecreate_msg(-1, HDF5Constants.H5E_MAJOR, "null");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eget_msg_invalid_msgid() throws Throwable {
+        H5.H5Eget_msg(-1, null);
+    }
+
+    @Test
+    public void testH5Ecreate_stack() {
+        try {
+            int stack_id = H5.H5Ecreate_stack();
+            assertTrue("H5.H5Ecreate_stack", stack_id > 0);
+            H5.H5Eclose_stack(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ecreate_stack: " + err);
+        }
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eset_current_stack_invalid_stkid() throws Throwable {
+        H5.H5Eset_current_stack(-1);
+    }
+
+    @Test
+    public void testH5Eset_current_stack() {
+        long num_msg = -1;
+        int stack_id = -1;
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+            //err.printStackTrace(); //This will clear the error stack
+        }
+        
+        // Verify we have the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eset_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+                    num_msg == 2);
+        
+        //Save a copy of the current stack
+        try {
+            stack_id = H5.H5Eget_current_stack();
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eset_current_stack: " + err);
+        }
+        assertFalse("H5.H5Eset_current_stack: get_current_stack - "
+                    + stack_id, stack_id < 0);
+        assertFalse("H5.H5Eset_current_stack: get_current_stack - "
+                + stack_id, stack_id == HDF5Constants.H5E_DEFAULT);
+        
+        //Verify the copy has the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eset_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+                    num_msg == 2);
+
+        //Generate errors on default stack
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+            //err.printStackTrace(); //This will clear the error stack
+        }
+
+        // Verify we have the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+                num_msg == 2);
+        
+        //Remove one message from the current stack
+        try {
+            H5.H5Epop(HDF5Constants.H5E_DEFAULT, 1);
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eset_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eset_current_stack: pop #:" + num_msg,
+                    num_msg == 1);
+        
+        //Verify the copy still has the correct number of messages
+        try {
+            num_msg = H5.H5Eget_num(stack_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eset_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+                    num_msg == 2);
+
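+        // Restoring the saved copy should put its two messages back on the default stack.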
+        try {
+            H5.H5Eset_current_stack(stack_id);
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eset_current_stack: " + err);
+        }
+        assertTrue("H5.H5Eset_current_stack: get_num - " + num_msg,
+                    num_msg == 2);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Epop_invalid_stkid() throws Throwable {
+        H5.H5Epop(-1, 0);
+    }
+
+    @Test
+    public void testH5Epop() throws Throwable {
+        long num_msg = -1;
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+        assertTrue("H5.H5Epop before #:" + num_msg, num_msg == 2);
+        try {
+            H5.H5Epop(HDF5Constants.H5E_DEFAULT, 1);
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Epop: " + err);
+        }
+        assertTrue("H5.H5Epop after #:" + num_msg, num_msg == 1);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5EprintInt_invalid_classid() throws Throwable {
+        H5.H5Eprint2(-1, null);
+    }
+
+    @Test
+    public void testH5EprintInt() {
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+        try {
+            H5.H5Eprint2(HDF5Constants.H5E_DEFAULT, null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5EprintInt: " + err);
+        }
+    }
+
+    @Test
+    public void testH5EclearInt() {
+        try {
+            H5.H5Eclear(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5EclearInt: " + err);
+        }
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eclear2_invalid_stkid() throws Throwable {
+        H5.H5Eclear2(-1);
+    }
+
+    @Test
+    public void testH5Eclear() {
+        try {
+            H5.H5Eclear2(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eclear2: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Eclear2_with_msg() {
+        long num_msg = -1;
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eclear2_with_msg: " + err);
+        }
+        assertTrue("H5.H5Eclear2_with_msg before #:" + num_msg,
+                    num_msg == 2);
+        try {
+            H5.H5Eclear2(HDF5Constants.H5E_DEFAULT);
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eclear2_with_msg: " + err);
+        }
+            assertTrue("H5.H5Eclear2_with_msg after #:" + num_msg, num_msg == 0);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eauto_is_v2_invalid_stkid() throws Throwable {
+        H5.H5Eauto_is_v2(-1);
+    }
+
+    @Test
+    public void testH5Eauto_is_v2() {
+        boolean is_v2 = false;
+        try {
+            is_v2 = H5.H5Eauto_is_v2(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eauto_is_v2: " + err);
+        }
+        assertTrue("H5.H5Eauto_is_v2: ", is_v2);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eget_num_invalid_stkid() throws Throwable {
+        H5.H5Eget_num(-1);
+    }
+
+    @Test
+    public void testH5Eget_num() {
+        long num_msg = -1;
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_num: " + err);
+        }
+        assertTrue("H5.H5Eget_num #:" + num_msg, num_msg == 0);
+    }
+
+    @Test
+    public void testH5Eget_num_with_msg() {
+        long num_msg = -1;
+        try {
+            H5.H5Fopen("test", 0, 1);
+        }
+        catch (Throwable err) {
+        }
+        try {
+            num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Eget_num_with_msg: " + err);
+        }
+        assertTrue("H5.H5Eget_num_with_msg #:" + num_msg, num_msg > 0);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Eregister.java b/sourceTest/java/test/hdf5lib/TestH5Eregister.java
new file mode 100644
index 0000000..ff6e8ce
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Eregister.java
@@ -0,0 +1,54 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Eregister {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Eregister_class_cls_name_null() throws Throwable {
+        H5.H5Eregister_class(null, "libname", "version");
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Eregister_class_lib_name_null() throws Throwable {
+        H5.H5Eregister_class("clsname", null, "version");
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Eregister_class_version_null() throws Throwable {
+        H5.H5Eregister_class("clsname", "libname", null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Eunregister_class_invalid_classid() throws Throwable {
+        H5.H5Eunregister_class(-1);
+    }
+
+    @Test
+    public void testH5Eregister_class() {
+        int hdf_java_classid = -1;
+        try {
+            hdf_java_classid = H5.H5Eregister_class("HDF-Java-Error",
+                    "hdf-java", "2.5");
+        }
+        catch (Throwable err) {
+            fail("H5.H5Eregister_class: " + err);
+        }
+        try {
+            H5.H5Eunregister_class(hdf_java_classid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Eunregister_class: " + err);
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5F.java b/sourceTest/java/test/hdf5lib/TestH5F.java
new file mode 100644
index 0000000..3938ecf
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5F.java
@@ -0,0 +1,229 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5F {
+    private static final String H5_FILE = "test.h5";
+
+    private static final int COUNT_OBJ_FILE = 1;
+    private static final int COUNT_OBJ_DATASET = 0;
+    private static final int COUNT_OBJ_GROUP = 0;
+    private static final int COUNT_OBJ_DATATYPE = 0;
+    private static final int COUNT_OBJ_ATTR = 0;
+    private static final int COUNT_OBJ_ALL = (COUNT_OBJ_FILE
+            + COUNT_OBJ_DATASET + COUNT_OBJ_GROUP + COUNT_OBJ_DATATYPE + COUNT_OBJ_ATTR);
+    private static final int[] OBJ_COUNTS = { COUNT_OBJ_FILE,
+            COUNT_OBJ_DATASET, COUNT_OBJ_GROUP, COUNT_OBJ_DATATYPE,
+            COUNT_OBJ_ATTR, COUNT_OBJ_ALL };
+    private static final int[] OBJ_TYPES = { HDF5Constants.H5F_OBJ_FILE,
+            HDF5Constants.H5F_OBJ_DATASET, HDF5Constants.H5F_OBJ_GROUP,
+            HDF5Constants.H5F_OBJ_DATATYPE, HDF5Constants.H5F_OBJ_ATTR,
+            HDF5Constants.H5F_OBJ_ALL };
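+    // With only the file object open, every expected count is 0 except H5F_OBJ_FILE (1); H5F_OBJ_ALL totals 1.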
+    int H5fid = -1;
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    @Before
+    public void createH5file()
+            throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+        _deleteFile(H5_FILE);
+    }
+
+    @Test
+    public void testH5Fget_create_plist() {
+        int plist = -1;
+
+        try {
+            plist = H5.H5Fget_create_plist(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_create_plist: " + err);
+        }
+        assertTrue(plist > 0);
+        try {H5.H5Pclose(plist);} catch (HDF5LibraryException e) {e.printStackTrace();}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Fget_create_plist_closed() throws Throwable {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+
+        // it should fail because the file was closed.
+        H5.H5Fget_create_plist(fid);
+    }
+
+    @Test
+    public void testH5Fget_access_plist() {
+        int plist = -1;
+
+        try {
+            plist = H5.H5Fget_access_plist(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_access_plist: " + err);
+        }
+        assertTrue(plist > 0);
+        try {H5.H5Pclose(plist);} catch (HDF5LibraryException e) {e.printStackTrace();}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Fget_access_plist_closed() throws Throwable {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+
+        // it should fail because the file was closed.
+        H5.H5Fget_access_plist(fid);
+    }
+
+    @Test
+    public void testH5Fget_intent_rdwr() {
+        int intent = 0;
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+        try {
+            intent = H5.H5Fget_intent(fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_intent: " + err);
+        }
+        assertEquals(intent, HDF5Constants.H5F_ACC_RDWR);
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+    }
+
+    @Test
+    public void testH5Fget_intent_rdonly() {
+        int intent = 0;
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+        try {
+            intent = H5.H5Fget_intent(fid);
+            System.err.println(intent);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_intent: " + err);
+        }
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+
+        // BROKEN in HDF5 1.8.13 and HDF-Java 2.10.1
+        //assertEquals(intent, HDF5Constants.H5F_ACC_RDONLY);
+    }
+
+    @Test
+    public void testH5Fget_obj_count() {
+        long count = -1;
+
+        for (int i = 0; i < OBJ_TYPES.length; i++) {
+            try {
+                count = H5.H5Fget_obj_count_long(H5fid, OBJ_TYPES[i]);
+            }
+            catch (Throwable err) {
+                fail("H5.H5Fget_obj_count: " + err);
+            }
+
+            assertEquals(count, OBJ_COUNTS[i]);
+        }
+    }
+
+    @Test
+    public void testH5Fget_obj_ids() {
+        long count = 0;
+        int max_objs = 100;
+        int[] obj_id_list = new int[max_objs];
+        int[] open_obj_counts = new int[OBJ_TYPES.length];
+
+        for (int i = 0; i < OBJ_TYPES.length; i++)
+            open_obj_counts[i] = 0;
+
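+        // Only the file handle itself should be open at this point. Assuming
+        // OBJ_TYPES[0] is H5F_OBJ_FILE and the last entry is H5F_OBJ_ALL, the
+        // "all" count is expected to equal the sum of the per-type counts.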
+        open_obj_counts[0] = 1;
+        for (int i = 0; i < OBJ_TYPES.length - 1; i++)
+            open_obj_counts[OBJ_TYPES.length - 1] += open_obj_counts[i];
+
+        for (int i = 0; i < OBJ_TYPES.length; i++) {
+            try {
+                count = H5.H5Fget_obj_ids_long(H5fid, OBJ_TYPES[i], max_objs,
+                        obj_id_list);
+            }
+            catch (Throwable err) {
+                fail("H5.H5Fget_obj_ids: " + err);
+            }
+            assertEquals(count, open_obj_counts[i]);
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Fbasic.java b/sourceTest/java/test/hdf5lib/TestH5Fbasic.java
new file mode 100644
index 0000000..2e55754
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Fbasic.java
@@ -0,0 +1,284 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Fbasic {
+    private static final String H5_FILE = "test.h5";
+    private static final String TXT_FILE = "test.txt";
+    int H5fid = -1;
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    @Before
+    public void createH5file() throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+        _deleteFile(H5_FILE);
+    }
+
+    @Test
+    public void testH5Fcreate() {
+        assertTrue(H5fid > 0);
+    }
+
+    @Test
+    public void testH5Fis_hdf5() {
+        boolean isH5 = false;
+
+        try {
+            isH5 = H5.H5Fis_hdf5(H5_FILE);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fis_hdf5 failed on " + H5_FILE + ": " + err);
+        }
+        assertTrue(isH5);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Fcreate_EXCL() throws Throwable {
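+        // Must fail: createH5file() already created H5_FILE, and
+        // H5F_ACC_EXCL fails if the file already exists.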
+        H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_EXCL,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Fopen_read_only() throws Throwable {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+
+        // set the file to read-only
+        File file = new File(H5_FILE);
+        if (file.setReadOnly()) {
+            // this should fail.
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+
+            try {
+                H5.H5Fclose(fid);
+            }
+            catch (Exception ex) {
+            }
+        }
+        else {
+            fail("File.setWritable(true) failed.");
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Freopen_closed() throws Throwable {
+        int fid = -1;
+        int fid2 = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+
+        // should fail because the file was closed.
+        fid2 = H5.H5Freopen(fid);
+    }
+
+    @Test
+    public void testH5Freopen() {
+        int fid = -1;
+        int fid2 = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        try {
+            fid2 = H5.H5Freopen(fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Freopen: " + err);
+        }
+        assertTrue(fid2 > 0);
+
+        try {
+            H5.H5Fclose(fid2);
+        }
+        catch (Exception ex) {
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+    }
+
+    @Test
+    public void testH5Fclose() {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fclose: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Fclose_twice() throws Throwable {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fclose: " + err);
+        }
+
+        // it should fail since the file was closed.
+        H5.H5Fclose(fid);
+    }
+
+    @Test
+    public void testH5Fget_freespace() {
+        long freeSpace = 0;
+
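+        // A freshly created file has released no space yet, so its
+        // free-space total should be 0.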
+        try {
+            freeSpace = H5.H5Fget_freespace(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_freespace: " + err);
+        }
+        assertEquals(freeSpace, 0);
+    }
+
+    @Test
+    public void testH5Fget_filesize() {
+        long fileSize = 0;
+
+        try {
+            fileSize = H5.H5Fget_filesize(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_freespace: " + err);
+        }
+        assertTrue(fileSize > 0);
+    }
+
+    @Test
+    public void testH5Fget_mdc_hit_rate() {
+        double rate;
+
+        try {
+            rate = H5.H5Fget_mdc_hit_rate(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_mdc_hit_rate: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Fget_mdc_size() {
+        int nentries = -1;
+        long cache_sizes[] = new long[3];
+
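+        // cache_sizes receives the metadata cache's max size, min clean size
+        // and current size; the return value is presumably the current number
+        // of cache entries, which is 4 for a newly created file here.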
+        try {
+            nentries = H5.H5Fget_mdc_size(H5fid, cache_sizes);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_mdc_size: " + err);
+        }
+        assertTrue("H5.H5Fget_mdc_size #:" + nentries, nentries == 4);
+    }
+
+    @Test
+    public void testH5Freset_mdc_hit_rate_stats() {
+
+        try {
+            H5.H5Freset_mdc_hit_rate_stats(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Freset_mdc_hit_rate_stats: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Fget_name() {
+        String fname = null;
+
+        try {
+            fname = H5.H5Fget_name(H5fid);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fget_name: " + err);
+        }
+        assertNotNull(fname);
+        assertEquals(fname, H5_FILE);
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Fparams.java b/sourceTest/java/test/hdf5lib/TestH5Fparams.java
new file mode 100644
index 0000000..13a4ca7
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Fparams.java
@@ -0,0 +1,153 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Fparams {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Fcreate_null() throws Throwable {
+        H5.H5Fcreate(null, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Fopen_null() throws Throwable {
+        H5.H5Fopen(null, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Fis_hdf5_null() throws Throwable {
+        H5.H5Fis_hdf5(null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Fmount_null() throws Throwable {
+        H5.H5Fmount(-1, null, -1, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Funmount_null() throws Throwable {
+        H5.H5Funmount(-1, null);
+    }
+
+    @Test
+    public void testH5Fis_hdf5_text() {
+        File txtFile = null;
+        boolean isH5 = false;
+
+        try {
+            txtFile = new File("test.txt");
+            if (!txtFile.exists())
+                txtFile.createNewFile();
+            isH5 = H5.H5Fis_hdf5("test.txt");
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fis_hdf5 failed on test.txt: " + err);
+        }
+
+        assertFalse(isH5);
+
+        try {
+            txtFile.delete();
+        }
+        catch (SecurityException e) {
+            // e.printStackTrace();
+        }
+    }
+
+    @Test
+    public void testH5Fcreate() {
+        int fid = -1;
+        File file = null;
+
+        try {
+            fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            if (fid > 0) {
+                H5.H5Fclose(fid);
+            }
+            file = new File("test.h5");
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                // e.printStackTrace();
+            }
+        }
+    }
+
+    @Test
+    public void testH5Fflush_global() {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        try {
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fflush: " + err);
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+    }
+
+    @Test
+    public void testH5Fflush_local() {
+        int fid = -1;
+
+        try {
+            fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fopen: " + err);
+        }
+
+        try {
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            fail("H5.H5Fflush: " + err);
+        }
+
+        try {
+            H5.H5Fclose(fid);
+        }
+        catch (Exception ex) {
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5G.java b/sourceTest/java/test/hdf5lib/TestH5G.java
new file mode 100644
index 0000000..ad04115
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5G.java
@@ -0,0 +1,490 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5G_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5G {
+    private static final String H5_FILE = "test.h5";
+    private static final String H5_FILE2 = "test2.h5";
+    private static final String[] GROUPS = { "/G1", "/G1/G11", "/G1/G12",
+            "/G1/G11/G111", "/G1/G11/G112", "/G1/G11/G113", "/G1/G11/G114" };
+    private static final String[] GROUPS2 = { "/G1", "/G1/G14", "/G1/G12", "/G1/G13", "/G1/G11"};
+    int H5fid = -1;
+    int H5fid2 = -1;
+
+    private final int _createGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+
+        return gid;
+    }
+    
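+    /**
+     * Like _createGroup, but with a group creation property list that tracks
+     * and indexes link creation order. H5P_CRT_ORDER_TRACKED and
+     * H5P_CRT_ORDER_INDEXED are distinct bit flags, so adding them here is
+     * equivalent to OR-ing them together.
+     */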
+    private final int _createGroup2(int fid, String name) {
+        int gid = -1;
+        int gcpl = -1;
+        try {
+            gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); // create gcpl
+            if (gcpl >= 0) {
+                H5.H5Pset_link_creation_order(gcpl,
+                        HDF5Constants.H5P_CRT_ORDER_TRACKED
+                        + HDF5Constants.H5P_CRT_ORDER_INDEXED); // set link creation order
+            }
+        }
+        catch (final Exception ex) {
+            fail("H5.H5Pcreate() failed. " + ex);
+        }
+        try {
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                    gcpl, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+        try {H5.H5Pclose(gcpl);} catch (final Exception ex) {}
+
+        return gid;
+    }
+
+    private final int _openGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            gid = H5.H5Gopen(fid, name, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            gid = -1;
+            err.printStackTrace();
+            fail("H5.H5Gopen: " + err);
+        }
+
+        return gid;
+    }
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    @Before
+    public void createH5file()
+            throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        
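+        // Groups in H5_FILE2 are created with creation-order tracking (see
+        // _createGroup2) so that testH5Gget_obj_info_all_byIndexType can
+        // iterate them by creation order as well as by name.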
+        H5fid2 = H5.H5Fcreate(H5_FILE2, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+        int gid = -1;
+
+        for (int i = 0; i < GROUPS.length; i++) {
+            gid = _createGroup(H5fid, GROUPS[i]);
+            assertTrue(gid > 0);
+            try {H5.H5Gclose(gid);} catch (Exception ex) {}
+        }
+        
+        for (int i = 0; i < GROUPS2.length; i++) {
+            gid = _createGroup2(H5fid2, GROUPS2[i]);
+            assertTrue(gid > 0);
+            try {H5.H5Gclose(gid);} catch (Exception ex) {}
+        }
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        H5.H5Fflush(H5fid2, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+        if (H5fid2 > 0) {
+            try {H5.H5Fclose(H5fid2);} catch (Exception ex) {}
+        }
+       _deleteFile(H5_FILE);
+       _deleteFile(H5_FILE2);
+    }
+
+    @Test
+    public void testH5Gopen() {
+        for (int i = 0; i < GROUPS.length; i++) {
+            int gid = _openGroup(H5fid, GROUPS[i]);
+            assertTrue(gid > 0);
+            try {
+                H5.H5Gclose(gid);
+            }
+            catch (Exception ex) {
+            }
+        }
+    }
+
+    @Test
+    public void testH5Gget_create_plist() {
+        int gid = -1;
+        int pid = -1;
+
+        for (int i = 0; i < GROUPS.length; i++) {
+            gid = _openGroup(H5fid, GROUPS[i]);
+            assertTrue(gid > 0);
+
+            try {
+                pid = H5.H5Gget_create_plist(gid);
+            }
+            catch (Throwable err) {
+                err.printStackTrace();
+                fail("H5.H5Gget_create_plist: " + err);
+            }
+            assertTrue(pid > 0);
+
+            try {
+                H5.H5Gclose(gid);
+            }
+            catch (Exception ex) {
+            }
+        }
+    }
+
+    @Test
+    public void testH5Gget_info() {
+        H5G_info_t info = null;
+
+        for (int i = 0; i < GROUPS.length; i++) {
+
+            try {
+                info = H5.H5Gget_info(H5fid);
+            }
+            catch (Throwable err) {
+                err.printStackTrace();
+                fail("H5.H5Gget_info: " + err);
+            }
+            assertNotNull(info);
+        }
+    }
+
+    @Test
+    public void testH5Gget_info_by_name() {
+        H5G_info_t info = null;
+
+        for (int i = 0; i < GROUPS.length; i++) {
+            try {
+                info = H5.H5Gget_info_by_name(H5fid, GROUPS[i],
+                        HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Throwable err) {
+                err.printStackTrace();
+                fail("H5.H5Gget_info_by_name: " + err);
+            }
+            assertNotNull(info);
+        }
+    }
+
+    @Test
+    public void testH5Gget_info_by_idx() {
+        H5G_info_t info = null;
+        for (int i = 0; i < 2; i++) {
+            try {
+                info = H5.H5Gget_info_by_idx(H5fid, "/G1",
+                        HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
+                        i, HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Throwable err) {
+                err.printStackTrace();
+                fail("H5.H5Gget_info_by_idx: " + err);
+            }
+            assertNotNull(info);
+        }
+    }
+
+    @Test
+    public void testH5Gget_obj_info_all() {
+        H5G_info_t info = null;
+
+        int gid = _openGroup(H5fid, GROUPS[0]);
+
+        try {
+            info = H5.H5Gget_info(gid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+        assertNotNull(info);
+        assertTrue("number of links is empty", info.nlinks > 0);
+        String objNames[] = new String[(int) info.nlinks];
+        int objTypes[] = new int[(int) info.nlinks];
+        int lnkTypes[] = new int[(int) info.nlinks];
+        long objRefs[] = new long[(int) info.nlinks];
+
+        int names_found = 0;
+        try {
+            names_found = H5.H5Gget_obj_info_all(H5fid, GROUPS[0], objNames,
+                    objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_NAME);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_all: " + err);
+        }
+
+        assertTrue("number found[" + names_found + "] different than expected["
+                + objNames.length + "]", names_found == objNames.length);
+        for (int i = 0; i < objNames.length; i++) {
+            assertNotNull("name #" + i + " does not exist", objNames[i]);
+            assertTrue(objNames[i].length() > 0);
+        }
+    }
+
+    @Test
+    public void testH5Gget_obj_info_all_gid() {
+        H5G_info_t info = null;
+
+        int gid = _openGroup(H5fid, GROUPS[0]);
+
+        try {
+            info = H5.H5Gget_info(gid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info: " + err);
+        }
+        assertNotNull(info);
+        assertTrue("number of links is empty", info.nlinks > 0);
+        String objNames[] = new String[(int) info.nlinks];
+        long objRefs[] = new long[(int) info.nlinks];
+        int lnkTypes[] = new int[(int) info.nlinks];
+        int objTypes[] = new int[(int) info.nlinks];
+
+        int names_found = 0;
+        try {
+            names_found = H5.H5Gget_obj_info_all(gid, null, objNames, objTypes, lnkTypes,
+                    objRefs, HDF5Constants.H5_INDEX_NAME);
+        }
+        catch (Throwable err) {
+            try {
+                H5.H5Gclose(gid);
+            }
+            catch (Exception ex) {
+            }
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_all: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+
+        assertTrue("number found[" + names_found + "] different than expected["
+                + objNames.length + "]", names_found == objNames.length);
+        for (int i = 0; i < objNames.length; i++) {
+            assertNotNull("name #" + i + " does not exist", objNames[i]);
+            assertTrue(objNames[i].length() > 0);
+        }
+    }
+
+    @Test
+    public void testH5Gget_obj_info_all_gid2() {
+        H5G_info_t info = null;
+
+        int gid = _openGroup(H5fid, GROUPS[1]);
+
+        try {
+            info = H5.H5Gget_info(gid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info: " + err);
+        }
+        assertNotNull(info);
+        assertTrue("number of links is empty", info.nlinks > 0);
+        String objNames[] = new String[(int) info.nlinks];
+        long objRefs[] = new long[(int) info.nlinks];
+        int lnkTypes[] = new int[(int) info.nlinks];
+        int objTypes[] = new int[(int) info.nlinks];
+
+        int names_found = 0;
+        try {
+            names_found = H5.H5Gget_obj_info_all(gid, null, objNames, objTypes, lnkTypes,
+                    objRefs, HDF5Constants.H5_INDEX_NAME);
+        }
+        catch (Throwable err) {
+            try {
+                H5.H5Gclose(gid);
+            }
+            catch (Exception ex) {
+            }
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_all: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+
+        assertTrue("number found[" + names_found + "] different than expected["
+                + objNames.length + "]", names_found == objNames.length);
+        for (int i = 0; i < objNames.length; i++) {
+            assertNotNull("name #" + i + " does not exist", objNames[i]);
+            assertTrue(objNames[i].length() > 0);
+        }
+    }
+
+    @Test
+    public void testH5Gget_obj_info_max() {
+        int gid = _openGroup(H5fid, GROUPS[0]);
+        int groups_max_size = GROUPS.length + 1;
+        String objNames[] = new String[groups_max_size];
+        int objTypes[] = new int[groups_max_size];
+        int lnkTypes[] = new int[groups_max_size];
+        long objRefs[] = new long[groups_max_size];
+
+        int names_found = 0;
+        try {
+            names_found = H5.H5Gget_obj_info_max(gid, objNames, objTypes, lnkTypes,
+                    objRefs, groups_max_size);
+        }
+        catch (Throwable err) {
+            try {
+                H5.H5Gclose(gid);
+            }
+            catch (Exception ex) {
+            }
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_max: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+
+        // expected number does not include root group
+        assertTrue("number found[" + names_found + "] different than expected["
+                + (GROUPS.length - 1) + "]", names_found == (GROUPS.length - 1));
+        for (int i = 0; i < GROUPS.length-1; i++) {
+            assertNotNull("name #"+i+" does not exist",objNames[i]);
+            assertTrue(objNames[i].length()>0);
+        }
+    }
+
+    @Test
+    public void testH5Gget_obj_info_max_limit() {
+        int gid = _openGroup(H5fid, GROUPS[0]);
+        int groups_max_size = GROUPS.length - 3;
+        String objNames[] = new String[groups_max_size];
+        int objTypes[] = new int[groups_max_size];
+        int lnkTypes[] = new int[groups_max_size];
+        long objRefs[] = new long[groups_max_size];
+
+        int names_found = 0;
+        try {
+            names_found = H5.H5Gget_obj_info_max(gid, objNames, objTypes, lnkTypes,
+                    objRefs, groups_max_size);
+        }
+        catch (Throwable err) {
+            try {
+                H5.H5Gclose(gid);
+            }
+            catch (Exception ex) {
+            }
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_max: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+
+        assertTrue("number found[" + names_found + "] different than expected["
+                + groups_max_size + "]", names_found == groups_max_size);
+        for (int i = 0; i < objNames.length; i++) {
+            assertNotNull("name #" + i + " does not exist", objNames[i]);
+            assertTrue(objNames[i].length() > 0);
+        }
+    }
+    
+    @Test
+    public void testH5Gget_obj_info_all_byIndexType() {
+        H5G_info_t info = null;
+
+        int gid = _openGroup(H5fid2, GROUPS2[0]);
+
+        try {
+            info = H5.H5Gget_info(gid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+        assertNotNull(info);
+        assertTrue("number of links is empty", info.nlinks > 0);
+        String objNames[] = new String[(int) info.nlinks];
+        int objTypes[] = new int[(int) info.nlinks];
+        int lnkTypes[] = new int[(int) info.nlinks];
+        long objRefs[] = new long[(int) info.nlinks];
+
+        
+        try {
+            H5.H5Gget_obj_info_all(H5fid2, GROUPS2[0], objNames,
+                    objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_CRT_ORDER);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_all: " + err);
+        }
+       
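+        // GROUPS2 created G14, G12, G13, G11 inside G1, in that order, so by
+        // creation order indices 1-3 are G12, G13 and G11.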
+        assertEquals("G12",objNames[1]);
+        assertEquals("G13", objNames[2] );
+        assertEquals("G11", objNames[3] );
+        
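+        // Sorted by name instead, the same members come back as G11, G12,
+        // G13, G14.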
+        try {
+           H5.H5Gget_obj_info_all(H5fid2, GROUPS2[0], objNames,
+                    objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_NAME);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_all: " + err);
+        }
+        
+        assertEquals("G12",objNames[1]);
+        assertEquals("G13", objNames[2] );
+        assertEquals("G14", objNames[3] );
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Gbasic.java b/sourceTest/java/test/hdf5lib/TestH5Gbasic.java
new file mode 100644
index 0000000..6f80fbf
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Gbasic.java
@@ -0,0 +1,345 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5G_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Gbasic {
+    private static final String H5_FILE = "test.h5";
+    int H5fid = -1;
+
+    private final int _createGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+
+        return gid;
+    }
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    @Before
+    public void createH5file()
+            throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+        _deleteFile(H5_FILE);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Gcreate_null() throws Throwable {
+        int gid = -1;
+
+        // it should fail because the group name is null
+        gid = H5.H5Gcreate(H5fid, null, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gcreate_invalid() throws Throwable {
+        H5.H5Gcreate(-1, "Invalid ID", HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Gcreate() {
+        int gid = -1;
+        try {
+            gid = H5.H5Gcreate(H5fid, "/testH5Gcreate",
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+                        HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+        assertTrue(gid > 0);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Gclose() {
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Throwable err) {
+            fail("H5Gclose: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gcreate_exists() throws Throwable {
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+        // it should fail now because the group already exists in the file
+        gid = H5.H5Gcreate(H5fid, "/testH5Gcreate",
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Gcreate_anon() {
+        int gid = -1;
+        try {
+            gid = H5.H5Gcreate_anon(H5fid, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate_anon: " + err);
+        }
+        assertTrue(gid > 0);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Gopen_null() throws Throwable {
+        int gid = -1;
+
+        gid = H5.H5Gopen(H5fid, null, HDF5Constants.H5P_DEFAULT);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gopen_invalid() throws Throwable {
+        H5.H5Gopen(-1, "Invalid ID", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gopen_not_exists() throws Throwable {
+        int gid = -1;
+
+         gid = H5.H5Gopen(H5fid, "Never_created", HDF5Constants.H5P_DEFAULT);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Gopen() {
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+        try {
+            gid = H5.H5Gopen(H5fid, "/testH5Gcreate",
+                        HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gopen: " + err);
+        }
+        assertTrue(gid > 0);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gget_create_plist_invalid() throws Throwable {
+        H5.H5Gget_create_plist(-1);
+    }
+
+    @Test
+    public void testH5Gget_create_plist() {
+        int pid = -1;
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {
+            pid = H5.H5Gget_create_plist(gid);
+        }
+        catch (Throwable err) {
+            try {H5.H5Gclose(gid);} catch (Exception ex) {}
+            err.printStackTrace();
+            fail("H5.H5Gget_create_plist: " + err);
+        }
+        assertTrue(pid > 0);
+
+        try {H5.H5Pclose(pid);} catch (Exception ex) {}
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gget_info_invalid() throws Throwable {
+        H5.H5Gget_info(-1);
+    }
+
+    @Test
+    public void testH5Gget_info() {
+        H5G_info_t info = null;
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {
+            info = H5.H5Gget_info(gid);
+        }
+        catch (Throwable err) {
+            try {H5.H5Gclose(gid);} catch (Exception ex) {}
+            err.printStackTrace();
+            fail("H5.H5Gget_info: " + err);
+        }
+        assertNotNull(info);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Gget_info_by_name_null() throws Throwable {
+        H5.H5Gget_info_by_name(-1, null, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gget_info_by_name_invalid() throws Throwable {
+        H5.H5Gget_info_by_name(-1, "/testH5Gcreate", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gget_info_by_name_not_exists() throws Throwable {
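+        // Must fail: no group "/testH5Gcreate" has been created in this file.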
+        H5.H5Gget_info_by_name(H5fid, "/testH5Gcreate",
+                HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Gget_info_by_name() {
+        H5G_info_t info = null;
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {
+            info = H5.H5Gget_info_by_name(gid, "/testH5Gcreate",
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            try {H5.H5Gclose(gid);} catch (Exception ex) {}
+            err.printStackTrace();
+            fail("H5.H5Gget_info_by_name: " + err);
+        }
+        assertNotNull(info);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Gget_info_by_name_fileid() {
+        H5G_info_t info = null;
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+        try {
+            info = H5.H5Gget_info_by_name(H5fid, "/testH5Gcreate",
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            try {H5.H5Gclose(gid);} catch (Exception ex) {}
+            err.printStackTrace();
+            fail("H5.H5Gget_info_by_name: " + err);
+        }
+        assertNotNull(info);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Gget_info_by_idx_null() throws Throwable {
+        H5.H5Gget_info_by_idx(-1, null, HDF5Constants.H5P_DEFAULT,
+                HDF5Constants.H5_ITER_INC, 1, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gget_info_by_idx_invalid() throws Throwable {
+        H5.H5Gget_info_by_idx(-1, "/testH5Gcreate", HDF5Constants.H5P_DEFAULT,
+                HDF5Constants.H5_ITER_INC, 1, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Gget_info_by_idx_not_exists() throws Throwable {
+        H5.H5Gget_info_by_idx(H5fid, "/testH5Gcreate",
+                HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 1,
+                HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Gget_info_by_idx() {
+        H5G_info_t info = null;
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+
+        try {
+            info = H5.H5Gget_info_by_idx(gid, "/", HDF5Constants.H5_INDEX_NAME,
+                    HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info_by_idx: " + err);
+        }
+        assertNotNull(info);
+
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Gget_info_by_idx_fileid() {
+        H5G_info_t info = null;
+        int gid = _createGroup(H5fid, "/testH5Gcreate");
+        assertTrue(gid > 0);
+        try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+        try {
+            info = H5.H5Gget_info_by_idx(H5fid, "/",
+                    HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info_by_idx: " + err);
+        }
+        assertNotNull(info);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Giterate.java b/sourceTest/java/test/hdf5lib/TestH5Giterate.java
new file mode 100644
index 0000000..362c467
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Giterate.java
@@ -0,0 +1,117 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5G_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Giterate {
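+    // Pre-built test file; judging from the assertions below it holds dataset
+    // DS1, named datatype DT1, group G1 (containing DS2) and hard link L1.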
+    private static final String H5_FILE = "sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf";
+    int H5fid = -1;
+
+    private final int _openGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            gid = H5.H5Gopen(fid, name, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            gid = -1;
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+
+        return gid;
+    }
+
+    @Before
+    public void openH5file()
+            throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            H5fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+                HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Fopen: openH5file: " + err);
+        }
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Gget_obj_info_all() {
+        H5G_info_t info = null;
+
+        int gid = _openGroup(H5fid, "/");
+
+        try {
+            info = H5.H5Gget_info(gid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_info: " + err);
+        }
+        try {
+            H5.H5Gclose(gid);
+        }
+        catch (Exception ex) {
+        }
+        assertNotNull(info);
+        assertTrue("number of links is empty", info.nlinks > 0);
+        String objNames[] = new String[(int) info.nlinks];
+        int objTypes[] = new int[(int) info.nlinks];
+        int lnkTypes[] = new int[(int) info.nlinks];
+        long objRefs[] = new long[(int) info.nlinks];
+
+        int names_found = 0;
+        try {
+            names_found = H5.H5Gget_obj_info_all(H5fid, "/", objNames,
+                    objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_NAME);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gget_obj_info_all: " + err);
+        }
+
+        assertTrue("number found[" + names_found + "] different than expected["
+                + objNames.length + "]", names_found == objNames.length);
+        for (int i = 0; i < objNames.length; i++) {
+            assertNotNull("name #" + i + " does not exist", objNames[i]);
+            assertTrue(objNames[i].length() > 0);
+            if (objTypes[i]==HDF5Constants.H5O_TYPE_GROUP) {
+                assertTrue("Group is index: "+i + " ",i==2);
+                assertTrue("Group is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("G1")==0);
+            }
+            else if (objTypes[i]==HDF5Constants.H5O_TYPE_DATASET) {
+                assertTrue("Dataset is index: "+i + " ",(i==0)||(i==3));
+                if(i==0)
+                    assertTrue("Dataset is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("DS1")==0);
+                else
+                    assertTrue("Dataset is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("L1")==0);
+            }
+            else if (objTypes[i]==HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
+                assertTrue("Datatype is index: "+i + " ",i==1);
+                assertTrue("Datatype is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("DT1")==0);
+            }
+            else {
+                fail("  Unknown at index: " + i + " " + objNames[i]);
+            }
+        }
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Lbasic.java b/sourceTest/java/test/hdf5lib/TestH5Lbasic.java
new file mode 100644
index 0000000..ad78f7a
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Lbasic.java
@@ -0,0 +1,352 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5L_iterate_t;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5L_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Lbasic {
+    private static final String H5_FILE = "sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf";
+    int H5fid = -1;
+
+    @Before
+    public void openH5file()
+            throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            H5fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+                HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Fopen: openH5file: " + err);
+        }
+    }
+
+    @After
+    public void closeH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Lexists() {
+        boolean link_exists = false;
+        try {
+            link_exists = H5.H5Lexists(H5fid, "None", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+        assertFalse("H5Lexists ",link_exists);
+        try {
+            link_exists = H5.H5Lexists(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+        assertTrue("H5Lexists ",link_exists);
+        try {
+            link_exists = H5.H5Lexists(H5fid, "G1/DS2", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+        assertTrue("H5Lexists ",link_exists);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_not_exist() throws Throwable {
+        H5.H5Lget_info(H5fid, "None", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Lget_info_dataset() {
+        H5L_info_t link_info = null;
+        try {
+            link_info = H5.H5Lget_info(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertFalse("H5Lget_info ",link_info==null);
+        assertTrue("H5Lget_info link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+    }
+
+    @Test
+    public void testH5Lget_info_hardlink() {
+        H5L_info_t link_info = null;
+        try {
+            link_info = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertFalse("H5Lget_info ",link_info==null);
+        assertTrue("H5Lget_info link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+        assertTrue("Link Address ",link_info.address_val_size>0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_by_idx_name_not_exist_name() throws Throwable {
+        H5.H5Lget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_by_idx_name_not_exist_create() throws Throwable {
+        H5.H5Lget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_by_idx_not_exist_name() throws Throwable {
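+        // The root group holds only 4 links, so index 5 is out of range.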
+        H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_by_idx_not_exist_create() throws Throwable {
+        H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Lget_info_by_idx_n0() {
+        H5L_info_t link_info = null;
+        H5L_info_t link_info2 = null;
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("H5Lget_info_by_idx ",link_info==null);
+        assertTrue("H5Lget_info_by_idx link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+        try {
+            link_info2 = H5.H5Lget_info(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertTrue("Link Address ",link_info.address_val_size==link_info2.address_val_size);
+    }
+
+    @Test
+    public void testH5Lget_info_by_idx_n3() {
+        H5L_info_t link_info = null;
+        H5L_info_t link_info2 = null;
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("H5Lget_info_by_idx ",link_info==null);
+        assertTrue("H5Lget_info_by_idx link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+        try {
+            link_info2 = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertTrue("Link Address ",link_info.address_val_size==link_info2.address_val_size);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_name_by_idx_not_exist() throws Throwable {
+        H5.H5Lget_name_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Lget_name_by_idx_n0() {
+        String link_name = null;
+        try {
+            link_name = H5.H5Lget_name_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_name_by_idx: " + err);
+        }
+        assertFalse("H5Lget_name_by_idx ",link_name==null);
+        assertTrue("Link Name ",link_name.compareTo("DS1")==0);
+    }
+
+    @Test
+    public void testH5Lget_name_by_idx_n3() {
+        String link_name = null;
+        try {
+            link_name = H5.H5Lget_name_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_name_by_idx: " + err);
+        }
+        assertFalse("H5Lget_name_by_idx ",link_name==null);
+        assertTrue("Link Name ",link_name.compareTo("L1")==0);
+    }
+
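+    // H5Lvisit walks every link reachable from the file recursively, so the
+    // nested link G1/DS2 is reported along with the four root-level links.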
+    @Test
+    public void testH5Lvisit() {
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5L_iter_data implements H5L_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5L_iterate_t iter_data = new H5L_iter_data();
+        class H5L_iter_callback implements H5L_iterate_cb {
+            public int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5L_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5L_iterate_cb iter_cb = new H5L_iter_callback();
+        try {
+            H5.H5Lvisit(H5fid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lvisit: " + err);
+        }
+        assertFalse("H5Lvisit ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Lvisit "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==5);
+        assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(0)).link_name,(((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+        assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(1)).link_name,(((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DT1")==0);
+        assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(2)).link_name,(((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1")==0);
+        assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(3)).link_name,(((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1/DS2")==0);
+        assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(4)).link_name,(((H5L_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("L1")==0);
+    }
+
+    @Test
+    public void testH5Lvisit_by_name() {
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5L_iter_data implements H5L_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5L_iterate_t iter_data = new H5L_iter_data();
+        class H5L_iter_callback implements H5L_iterate_cb {
+            public int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5L_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5L_iterate_cb iter_cb = new H5L_iter_callback();
+        try {
+            H5.H5Lvisit_by_name(H5fid, "G1", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lvisit_by_name: " + err);
+        }
+        assertFalse("H5Lvisit_by_name ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Lvisit_by_name "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==1);
+        assertTrue("H5Lvisit_by_name "+(((H5L_iter_data)iter_data).iterdata.get(0)).link_name,(((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS2")==0);
+    }
+
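+    // Unlike H5Lvisit, H5Literate is non-recursive: only the four immediate
+    // members of the root group are returned, without G1/DS2.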
+    @Test
+    public void testH5Literate() {
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5L_iter_data implements H5L_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5L_iterate_t iter_data = new H5L_iter_data();
+        class H5L_iter_callback implements H5L_iterate_cb {
+            public int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5L_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5L_iterate_cb iter_cb = new H5L_iter_callback();
+        try {
+            H5.H5Literate(H5fid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Literate: " + err);
+        }
+        assertFalse("H5Literate ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Literate "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==4);
+        assertTrue("H5Literate "+(((H5L_iter_data)iter_data).iterdata.get(0)).link_name,(((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+        assertTrue("H5Literate "+(((H5L_iter_data)iter_data).iterdata.get(1)).link_name,(((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DT1")==0);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name,(((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1")==0);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("L1")==0);
+    }
+
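+    // Iterates only the immediate members of "G1", so a single link ("DS2") is expected.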
+    @Test
+    public void testH5Literate_by_name() {
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5L_iter_data implements H5L_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5L_iterate_t iter_data = new H5L_iter_data();
+        class H5L_iter_callback implements H5L_iterate_cb {
+            public int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5L_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5L_iterate_cb iter_cb = new H5L_iter_callback();
+        try {
+            H5.H5Literate_by_name(H5fid, "G1", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Literate_by_name: " + err);
+        }
+        assertFalse("H5Literate_by_name ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Literate_by_name "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==1);
+        assertTrue("H5Literate_by_name "+((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS2")==0);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Lcreate.java b/sourceTest/java/test/hdf5lib/TestH5Lcreate.java
new file mode 100644
index 0000000..49fe6cc
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Lcreate.java
@@ -0,0 +1,799 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.util.ArrayList;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5L_iterate_t;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5L_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Lcreate {
+    private static final String H5_EXTFILE = "test/hdf5lib/h5ex_g_iterate.hdf";
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    int H5fcpl = -1;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did1 = -1;
+    int H5did2 = -1;
+    int H5gcpl = -1;
+    int H5gid = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5L._createDataset: ",did > 0);
+
+        return did;
+    }
+
+    private final int _createGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            H5gcpl = HDF5Constants.H5P_DEFAULT;
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                    H5gcpl, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+        assertTrue("TestH5L._createGroup: ",gid > 0);
+
+        return gid;
+    }
+
+    private final void _createHardLink(int fid, int cid, String curname, int did, String dstname, int lcpl, int lapl) {
+        boolean link_exists = false;
+        try {
+            H5.H5Lcreate_hard(cid, curname, did, dstname, lcpl, lapl);
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            link_exists = H5.H5Lexists(did, dstname, lapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lcreate_hard: " + err);
+        }
+        assertTrue("TestH5L._createHardLink ", link_exists);
+    }
+
+    private final void _createSoftLink(int fid, String curname, int did, String dstname, int lcpl, int lapl) {
+        boolean link_exists = false;
+        try {
+            H5.H5Lcreate_soft(curname, did, dstname, lcpl, lapl);
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            link_exists = H5.H5Lexists(did, dstname, lapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lcreate_soft: " + err);
+        }
+        assertTrue("TestH5L._createSoftLink ", link_exists);
+    }
+
+    private final void _createExternalLink(int fid, String ext_filename, String curname, int did, String dstname, int lcpl, int lapl) {
+        boolean link_exists = false;
+        try {
+            H5.H5Lcreate_external(ext_filename, curname, did, dstname, lcpl, lapl);
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            link_exists = H5.H5Lexists(did, dstname, lapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lcreate_external: " + err);
+        }
+        assertTrue("TestH5L._createExternalLink ", link_exists);
+    }
+
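+    // Per-test fixture: a fresh test.h5 holding dataset DS1 and group G1 with dataset DS2,
+    // created with link creation order tracked and indexed so the H5_INDEX_CRT_ORDER
+    // tests below can address links by creation order.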
+    @Before
+    public void createH5file()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+        try {
+            H5fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+            H5.H5Pset_link_creation_order(H5fcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    H5fcpl, HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5did1 = _createDataset(H5fid, H5dsid, "DS1", HDF5Constants.H5P_DEFAULT);
+            H5gid = _createGroup(H5fid, "/G1");
+            H5did2 = _createDataset(H5gid, H5dsid, "DS2", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5L.createH5file: " + err);
+        }
+        assertTrue("TestH5L.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5L.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+        assertTrue("TestH5L.createH5file: H5.H5Gcreate: ",H5gid > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5gid > 0) 
+            try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+        if (H5gcpl > 0) 
+            try {H5.H5Pclose(H5gcpl);} catch (Exception ex) {}
+        if (H5did2 > 0) 
+            try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did1 > 0) 
+            try {H5.H5Dclose(H5did1);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        if (H5fcpl > 0) 
+            try {H5.H5Pclose(H5fcpl);} catch (Exception ex) {}
+
+        _deleteFile(H5_FILE);
+    }
+
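+    // With H5_INDEX_CRT_ORDER, index 0 is the first link created in the root group
+    // (DS1, stored as a hard link).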
+    @Test
+    public void testH5Lget_info_by_idx_n0_create() {
+        H5L_info_t link_info = null;
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx_n0_create:H5Pget_link_creation_order " + err);
+        }
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("H5Lget_info_by_idx ", link_info==null);
+        assertTrue("H5Lget_info_by_idx link type", link_info.type==HDF5Constants.H5L_TYPE_HARD);
+    }
+
+    @Test
+    public void testH5Lget_info_by_idx_n1_create() {
+        H5L_info_t link_info = null;
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx_n1_create:H5Pget_link_creation_order " + err);
+        }
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 1, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("H5Lget_info_by_idx ", link_info==null);
+        assertTrue("H5Lget_info_by_idx link type", link_info.type==HDF5Constants.H5L_TYPE_HARD);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcreate_hard_cur_not_exists() throws Throwable {
+        H5.H5Lcreate_hard(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Lcreate_hard() {
+        try {
+            H5.H5Lcreate_hard(H5fid, "DS1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Lcreate_hard:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcreate_hard_dst_link_exists() throws Throwable {
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Lcreate_hard(H5fid, "L1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Ldelete_hard_link() {
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            H5.H5Ldelete(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            assertFalse("testH5Lcreate_hard:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Lcreate_soft() {
+        try {
+            H5.H5Lcreate_soft("DS1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Lcreate_soft:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcreate_soft_dst_link_exists() throws Throwable {
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Lcreate_soft("L1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Ldelete_soft_link() {
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            H5.H5Ldelete(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            assertFalse("testH5Lcreate_soft:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Lget_info_softlink() {
+        H5L_info_t link_info = null;
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertFalse("H5Lget_info ", link_info==null);
+        assertTrue("H5Lget_info link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+        assertTrue("Link Address ", link_info.address_val_size>0);
+    }
+
+    @Test
+    public void testH5Lget_val_soft() {
+        String[] link_value = {null, null};
+        int link_type = -1;
+        
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_type = H5.H5Lget_val(H5fid, "L1", link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+        assertFalse("H5Lget_val ", link_value[0]==null);
+        assertTrue("Link Value ", link_value[0].compareTo("/G1/DS2")==0);
+    }
+
+    @Test
+    public void testH5Lcreate_soft_dangle() {
+        try {
+            H5.H5Lcreate_soft("DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Lcreate_soft:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Ldelete_soft_link_dangle() {
+        _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            H5.H5Ldelete(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+            assertFalse("testH5Lcreate_soft:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Lget_info_softlink_dangle() {
+        H5L_info_t link_info = null;
+        _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertFalse("H5Lget_info ", link_info==null);
+        assertTrue("H5Lget_info link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+        assertTrue("Link Address ", link_info.address_val_size>0);
+    }
+
+    @Test
+    public void testH5Lget_val_dangle() {
+        String[] link_value = {null,null};
+        int link_type = -1;
+
+        _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_type = H5.H5Lget_val(H5fid, "L2", link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+        assertFalse("H5Lget_val ", link_value[0]==null);
+        assertTrue("Link Value ", link_value[0].compareTo("DS3")==0);
+    }
+
+    @Test
+    public void testH5Lcreate_external() {
+        try {
+            H5.H5Lcreate_external(H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Lcreate_external:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lexists: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Lget_info_externallink() {
+        H5L_info_t link_info = null;
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info: " + err);
+        }
+        assertFalse("H5Lget_info ", link_info==null);
+        assertTrue("H5Lget_info link type", link_info.type==HDF5Constants.H5L_TYPE_EXTERNAL);
+        assertTrue("Link Address ", link_info.address_val_size>0);
+    }
+
+    @Test
+    public void testH5Lget_val_external(){
+        String[] link_value = {null,null};
+        int link_type = -1;
+        
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_type = H5.H5Lget_val(H5fid, "L1", link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_EXTERNAL);
+        assertFalse("H5Lget_val ", link_value[0]==null);
+        assertFalse("H5Lget_val ", link_value[1]==null);
+        assertTrue("Link Value ", link_value[0].compareTo("DT1")==0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcopy_cur_not_exists() throws Throwable {
+        H5.H5Lcopy(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Lcopy() {
+        try {
+            H5.H5Lcopy(H5fid, "DS1", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Lcopy:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Lcopy:H5Lexists: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcopy_dst_link_exists() throws Throwable {
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Lcopy(H5fid, "CPY1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lmove_cur_not_exists() throws Throwable {
+        H5.H5Lmove(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Lmove() {
+        try {
+            H5.H5Lmove(H5fid, "DS1", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Lmove:H5Lexists ", link_exists);
+            link_exists = H5.H5Lexists(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+            assertFalse("testH5Lmove:H5Lexists ", link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Lmove:H5Lexists: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lmove_dst_link_exists() throws Throwable {
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Lmove(H5fid, "CPY1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_val_by_idx_not_exist_name() throws Throwable {
+        String[] link_value = {null,null};
+        H5.H5Lget_val_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, link_value, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_val_by_idx_not_exist_create() throws Throwable {
+        String[] link_value = {null,null};
+        H5.H5Lget_val_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, link_value, HDF5Constants.H5P_DEFAULT);
+    }
+
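+    // Root-group links in name order are DS1, G1, LS, so index 2 selects the soft link
+    // LS created just below.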
+    @Test
+    public void testH5Lget_val_by_idx_n2_name() {
+        H5L_info_t link_info = null;
+        String[] link_value = {null,null};
+        int link_type = -1;
+        
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("testH5Lget_val_by_idx_n2 ",link_info==null);
+        assertTrue("testH5Lget_val_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+        try {
+            link_type = H5.H5Lget_val_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val_by_idx: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+        assertFalse("testH5Lget_val_by_idx_n2 ", link_value[0]==null);
+        assertTrue("testH5Lget_val_by_idx_n2 Link Value ", link_value[0].compareTo("/G1/DS2")==0);
+    }
+
+    @Test
+    public void testH5Lget_val_by_idx_n2_create() {
+        H5L_info_t link_info = null;
+        String[] link_value = {null,null};
+        int link_type = -1;
+ 
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val_by_idx_n2_create: H5Pget_link_creation_order " + err);
+        }
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("testH5Lget_val_by_idx_n2 ", link_info==null);
+        assertTrue("testH5Lget_val_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+        try {
+            link_type = H5.H5Lget_val_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val_by_idx: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+        assertFalse("testH5Lget_val_by_idx_n2 ", link_value[0]==null);
+        assertTrue("testH5Lget_val_by_idx_n2 Link Value ", link_value[0].compareTo("/G1/DS2")==0);
+    }
+
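+    // Root-group links in name order are DS1, G1, LE, so index 2 selects the external
+    // link LE, whose value unpacks into both the target object name and the external
+    // file name (hence the two non-null checks below).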
+    @Test
+    public void testH5Lget_val_by_idx_external_name() {
+        H5L_info_t link_info = null;
+        String[] link_value = {null,null};
+        int link_type = -1;
+        
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("testH5Lget_val_by_idx_ext ", link_info==null);
+        assertTrue("testH5Lget_val_by_idx_ext link type "+link_info.type, link_info.type==HDF5Constants.H5L_TYPE_EXTERNAL);
+        try {
+            link_type = H5.H5Lget_val_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val_by_idx: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_EXTERNAL);
+        assertFalse("testH5Lget_val_by_idx_ext ", link_value[0]==null);
+        assertFalse("testH5Lget_val_by_idx_ext ", link_value[1]==null);
+        assertTrue("testH5Lget_val_by_idx_ext Link Value ", link_value[0].compareTo("DT1")==0);
+    }
+
+    @Test
+    public void testH5Lget_val_by_idx_external_create() {
+        H5L_info_t link_info = null;
+        String[] link_value = {null,null};
+        int link_type = -1;
+        
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("testH5Lget_val_by_idx_ext ", link_info==null);
+        assertTrue("testH5Lget_val_by_idx_ext link type "+link_info.type, link_info.type==HDF5Constants.H5L_TYPE_EXTERNAL);
+        try {
+            link_type = H5.H5Lget_val_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_val_by_idx: " + err);
+        }
+        assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_EXTERNAL);
+        assertFalse("testH5Lget_val_by_idx_ext ", link_value[0]==null);
+        assertFalse("testH5Lget_val_by_idx_ext ", link_value[1]==null);
+        assertTrue("testH5Lget_val_by_idx_ext Link Value ", link_value[0].compareTo("DT1")==0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ldelete_by_idx_not_exist_name() throws Throwable {
+        H5.H5Ldelete_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ldelete_by_idx_not_exist_create() throws Throwable {
+        H5.H5Ldelete_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
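+    // Deletes the soft link LS through its name-order index (2), then confirms the
+    // deletion by expecting the follow-up H5Lget_info_by_idx at that index to fail.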
+    @Test
+    public void testH5Ldelete_by_idx_n2_name() {
+        H5L_info_t link_info = null;
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("testH5Ldelete_by_idx_n2 ", link_info==null);
+        assertTrue("testH5Ldelete_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+        try {
+            H5.H5Ldelete_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ldelete_by_idx: " + err);
+        }
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (HDF5LibraryException err) {
+            link_info = null;
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ldelete_by_idx: " + err);
+        }
+        assertTrue("testH5Ldelete_by_idx_n2 ",link_info==null);
+    }
+
+    @Test
+    public void testH5Ldelete_by_idx_n2_create() {
+        H5L_info_t link_info = null;
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lget_info_by_idx: " + err);
+        }
+        assertFalse("testH5Ldelete_by_idx_n2 ", link_info==null);
+        assertTrue("testH5Ldelete_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+        try {
+            H5.H5Ldelete_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ldelete_by_idx: " + err);
+        }
+        try {
+            link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (HDF5LibraryException err) {
+            link_info = null;
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ldelete_by_idx: " + err);
+        }
+        assertTrue("testH5Ldelete_by_idx_n2 ",link_info==null);
+    }
+
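+    // With H5_INDEX_CRT_ORDER, H5Lvisit returns links in creation order: the three links
+    // added here (CPY1, LE, LS) follow the original DS1, G1 and G1/DS2 entries.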
+    @Test
+    public void testH5Lvisit_create() {
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lvisit_create: H5Pget_link_creation_order " + err);
+        }
+        
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5L_iter_data implements H5L_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5L_iterate_t iter_data = new H5L_iter_data();
+        class H5L_iter_callback implements H5L_iterate_cb {
+            public int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5L_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5L_iterate_cb iter_cb = new H5L_iter_callback();
+        try {
+            H5.H5Lvisit(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lvisit: " + err);
+        }
+        assertFalse("H5Lvisit ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Lvisit "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==6);
+        assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+        assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("G1")==0);
+        assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1/DS2")==0);
+        assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("CPY1")==0);
+        assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("LE")==0);
+        assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(5)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(5)).link_name.compareToIgnoreCase("LS")==0);
+    }
+
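+    // Non-recursive counterpart of the test above: G1/DS2 is not visited, leaving five
+    // links in creation order (DS1, G1, CPY1, LE, LS).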
+    @Test
+    public void testH5Literate_create() {
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Literate_create: H5Pget_link_creation_order " + err);
+        }
+        
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5L_iter_data implements H5L_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5L_iterate_t iter_data = new H5L_iter_data();
+        class H5L_iter_callback implements H5L_iterate_cb {
+            public int callback(int group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5L_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5L_iterate_cb iter_cb = new H5L_iter_callback();
+        try {
+            H5.H5Literate(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Literate: " + err);
+        }
+        assertFalse("H5Literate ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Literate "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==5);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("G1")==0);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("CPY1")==0);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("LE")==0);
+        assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("LS")==0);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Lparams.java b/sourceTest/java/test/hdf5lib/TestH5Lparams.java
new file mode 100644
index 0000000..b1a7d0a
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Lparams.java
@@ -0,0 +1,208 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Lparams {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
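+    // Parameter-validation tests: each call below passes an invalid id (-1) and/or a null
+    // name, and the wrapper is expected to fail with HDF5LibraryException or
+    // NullPointerException instead of crashing in the native library.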
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_val_invalid() throws Throwable {
+        H5.H5Lget_val(-1, "Bogus", null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lget_val_null() throws Throwable {
+        H5.H5Lget_val(-1, null, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lexists_invalid() throws Throwable {
+        H5.H5Lexists(-1, "Bogus", -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lexists_null() throws Throwable {
+        H5.H5Lexists(-1, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_invalid() throws Throwable {
+        H5.H5Lget_info(-1, "Bogus", -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lget_info_null() throws Throwable {
+        H5.H5Lget_info(-1, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_info_by_idx_invalid() throws Throwable {
+        H5.H5Lget_info_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lget_info_by_idx_null() throws Throwable {
+        H5.H5Lget_info_by_idx(-1, null, 0, 0, 0L, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_name_by_idx_invalid() throws Throwable {
+        H5.H5Lget_name_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lget_name_by_idx_null() throws Throwable {
+        H5.H5Lget_name_by_idx(-1, null, 0, 0, 0L, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcreate_hard_invalid() throws Throwable {
+        H5.H5Lcreate_hard(-1, "Bogus", -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_hard_null_current() throws Throwable {
+        H5.H5Lcreate_hard(-1, null, 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_hard_null_dest() throws Throwable {
+        H5.H5Lcreate_hard(-1, "Bogus", 0, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ldelete_invalid() throws Throwable {
+        H5.H5Ldelete(-1, "Bogus", -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ldelete_null() throws Throwable {
+        H5.H5Ldelete(-1, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcreate_soft_invalid() throws Throwable {
+        H5.H5Lcreate_soft( "Bogus", -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_soft_null_current() throws Throwable {
+        H5.H5Lcreate_soft(null, 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_soft_null_dest() throws Throwable {
+        H5.H5Lcreate_soft("Bogus", 0, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcreate_external_invalid() throws Throwable {
+        H5.H5Lcreate_external("PathToFile", "Bogus", -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_external_null_file() throws Throwable {
+        H5.H5Lcreate_external(null, "Bogus", 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_external_null_current() throws Throwable {
+        H5.H5Lcreate_external("PathToFile", null, 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcreate_external_null_dest() throws Throwable {
+        H5.H5Lcreate_external("PathToFile", "Bogus", 0, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lcopy_invalid() throws Throwable {
+        H5.H5Lcopy(-1, "Bogus", -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcopy_null_current() throws Throwable {
+        H5.H5Lcopy(-1, null, 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lcopy_null_dest() throws Throwable {
+        H5.H5Lcopy(-1, "Bogus", 0, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lmove_invalid() throws Throwable {
+        H5.H5Lmove(-1, "Bogus", -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lmove_null_current() throws Throwable {
+        H5.H5Lmove(-1, null, 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lmove_null_dest() throws Throwable {
+        H5.H5Lmove(-1, "Bogus", 0, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Lget_val_by_idx_invalid() throws Throwable {
+        H5.H5Lget_val_by_idx(-1, "Bogus", -1, -1, -1L, null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lget_val_by_idx_null() throws Throwable {
+        H5.H5Lget_val_by_idx(-1, null, 0, 0, 0L, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ldelete_by_idx_invalid() throws Throwable {
+        H5.H5Ldelete_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ldelete_by_idx_null() throws Throwable {
+        H5.H5Ldelete_by_idx(-1, null, 0, 0, 0L, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lvisit_null() throws Throwable {
+        H5.H5Lvisit(-1, -1, -1, null, null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lvisit_by_name_nullname() throws Throwable {
+        H5.H5Lvisit_by_name(-1, null, -1, -1, null, null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Lvisit_by_name_null() throws Throwable {
+        H5.H5Lvisit_by_name(-1, "Bogus", -1, -1, null, null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Literate_null() throws Throwable {
+        H5.H5Literate(-1, -1, -1, -1, null, null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Literate_by_name_nullname() throws Throwable {
+        H5.H5Literate_by_name(-1, null, -1, -1, -1, null, null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Literate_by_name_null() throws Throwable {
+        H5.H5Literate_by_name(-1, "Bogus", -1, -1, -1, null, null, -1);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Obasic.java b/sourceTest/java/test/hdf5lib/TestH5Obasic.java
new file mode 100644
index 0000000..824f7a2
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Obasic.java
@@ -0,0 +1,328 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5O_iterate_t;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5O_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Obasic {
+    private static final String H5_FILE = "sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf";
+    private static long H5la_ds1 = -1;
+    private static long H5la_l1 = -1;
+    private static long H5la_dt1 = -1;
+    private static long H5la_g1 = -1;
+    int H5fid = -1;
+
+    @Before
+    public void openH5file()
+            throws HDF5LibraryException, NullPointerException {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            H5fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+                HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Fopen: openH5file: " + err);
+        }
+    }
+
+    @After
+    public void closeH5file() throws HDF5LibraryException {
+        if (H5fid > 0) {
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        }
+    }
+
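+    // These tests run read-only against the pre-built h5ex_g_iterate.hdf file, which holds
+    // dataset DS1, named datatype DT1, group G1 (containing DS2) and a link L1 that
+    // resolves to a dataset; the static H5la_* fields cache object addresses across tests.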
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oopen_not_exists() throws Throwable {
+        int oid = -1;
+
+        oid = H5.H5Oopen(H5fid, "Never_created", HDF5Constants.H5P_DEFAULT);
+
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Oget_info_dataset() {
+        int oid = -1;
+        H5O_info_t obj_info = null;
+        
+        try {
+            oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+            obj_info = H5.H5Oget_info(oid);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        H5la_ds1 = obj_info.addr;
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Oget_info_hardlink() {
+        int oid = -1;
+        H5O_info_t obj_info = null;
+        try {
+            oid = H5.H5Oopen(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+            obj_info = H5.H5Oget_info(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        H5la_l1 = obj_info.addr;
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Oget_info_group() {
+        int oid = -1;
+        H5O_info_t obj_info = null;
+        try {
+            oid = H5.H5Oopen(H5fid, "G1", HDF5Constants.H5P_DEFAULT);
+            obj_info = H5.H5Oget_info(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_GROUP);
+        H5la_g1 = obj_info.addr;
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Oget_info_datatype() {
+        int oid = -1;
+        H5O_info_t obj_info = null;
+        try {
+            oid = H5.H5Oopen(H5fid, "DT1", HDF5Constants.H5P_DEFAULT);
+            obj_info = H5.H5Oget_info(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_NAMED_DATATYPE);
+        H5la_dt1 = obj_info.addr;
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_name_not_exist_name() throws Throwable {
+        H5.H5Oget_info_by_name(H5fid, "None", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_name_not_exists() throws Throwable {
+        H5.H5Oget_info_by_name(H5fid, "Bogus", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Oget_info_by_name_dataset() {
+        H5O_info_t obj_info = null;
+        
+        try {
+            obj_info = H5.H5Oget_info_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        H5la_ds1 = obj_info.addr;
+    }
+
+    @Test
+    public void testH5Oget_info_by_name_hardlink() {
+        H5O_info_t obj_info = null;
+        try {
+            obj_info = H5.H5Oget_info_by_name(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        H5la_l1 = obj_info.addr;
+    }
+
+    @Test
+    public void testH5Oget_info_by_name_group() {
+        H5O_info_t obj_info = null;
+        try {
+            obj_info = H5.H5Oget_info_by_name(H5fid, "G1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_GROUP);
+        H5la_g1 = obj_info.addr;
+    }
+
+    @Test
+    public void testH5Oget_info_by_name_datatype() {
+        H5O_info_t obj_info = null;
+        try {
+            obj_info = H5.H5Oget_info_by_name(H5fid, "DT1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ",obj_info==null);
+        assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_NAMED_DATATYPE);
+        H5la_dt1 = obj_info.addr;
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_idx_name_not_exist_name() throws Throwable {
+        H5.H5Oget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_idx_name_not_exist_create() throws Throwable {
+        H5.H5Oget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_idx_not_exist_name() throws Throwable {
+        H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_idx_not_exist_create() throws Throwable {
+        H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Oget_info_by_idx_n0() {
+        H5O_info_t obj_info = null;
+        try {
+            obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_idx: " + err);
+        }
+        assertFalse("H5Oget_info_by_idx ",obj_info==null);
+        assertTrue("H5Oget_info_by_idx link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        assertTrue("Link Address ",obj_info.addr==H5la_ds1);
+    }
+
+    @Test
+    public void testH5Oget_info_by_idx_n3() {
+        H5O_info_t obj_info = null;
+        try {
+            obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_idx: " + err);
+        }
+        assertFalse("H5Oget_info_by_idx ",obj_info==null);
+        assertTrue("H5Oget_info_by_idx link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        assertTrue("Link Address ",obj_info.addr==H5la_l1);
+    }
+
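+    // H5Ovisit reports each reachable object exactly once, starting with the root group
+    // itself ("."); L1 resolves to an already-visited dataset, hence the five entries and
+    // the disabled sixth assert below.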
+    @Test
+    public void testH5Ovisit() {
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5O_iter_data implements H5O_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5O_iterate_t iter_data = new H5O_iter_data();
+        class H5O_iter_callback implements H5O_iterate_cb {
+            public int callback(int group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5O_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5O_iterate_cb iter_cb = new H5O_iter_callback();
+        try {
+            H5.H5Ovisit(H5fid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ovisit: " + err);
+        }
+        assertFalse("H5Ovisit ",((H5O_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Ovisit "+((H5O_iter_data)iter_data).iterdata.size(),((H5O_iter_data)iter_data).iterdata.size()==5);
+        assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(0)).link_name,(((H5O_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase(".")==0);
+        assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(1)).link_name,(((H5O_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DS1")==0);
+        assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(2)).link_name,(((H5O_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("DT1")==0);
+        assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(3)).link_name,(((H5O_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1")==0);
+        assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(4)).link_name,(((H5O_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("G1/DS2")==0);
+//        assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(5)).link_name,((idata)((H5O_iter_data)iter_data).iterdata.get(5)).link_name.compareToIgnoreCase("L1")==0);
+    }
+
+    @Test
+    public void testH5Ovisit_by_name() {
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5O_iter_data implements H5O_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5O_iterate_t iter_data = new H5O_iter_data();
+        class H5O_iter_callback implements H5O_iterate_cb {
+            public int callback(int group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5O_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5O_iterate_cb iter_cb = new H5O_iter_callback();
+        try {
+            H5.H5Ovisit_by_name(H5fid, "G1", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ovisit_by_name: " + err);
+        }
+        assertFalse("H5Ovisit_by_name ",((H5O_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Ovisit_by_name "+((H5O_iter_data)iter_data).iterdata.size(),((H5O_iter_data)iter_data).iterdata.size()==2);
+        assertTrue("H5Ovisit_by_name "+(((H5O_iter_data)iter_data).iterdata.get(0)).link_name,(((H5O_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase(".")==0);
+        assertTrue("H5Ovisit_by_name "+(((H5O_iter_data)iter_data).iterdata.get(1)).link_name,(((H5O_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DS2")==0);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Ocopy.java b/sourceTest/java/test/hdf5lib/TestH5Ocopy.java
new file mode 100644
index 0000000..04aa75d
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Ocopy.java
@@ -0,0 +1,293 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Ocopy {
+    private static final String FILENAME = "testRefsattribute.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did1 = -1;
+    int H5did2 = -1;
+    int H5gcpl = -1;
+    int H5gid = -1;
+    int H5dsid2 = -1;
+    long[] dims = { 2 };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5O._createDataset: ",did > 0);
+
+        return did;
+    }
+
+    private final int _createGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            H5gcpl = HDF5Constants.H5P_DEFAULT;
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                    H5gcpl, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+        assertTrue("TestH5O._createGroup: ",gid > 0);
+
+        return gid;
+    }
+
+    @Before
+    public void createH5file()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+        try {
+            H5fid = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5dsid2 =  H5.H5Screate(HDF5Constants.H5S_SCALAR);
+            H5did1 = _createDataset(H5fid, H5dsid2, "DS2", HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(1, dims, null);         
+            H5gid = _createGroup(H5fid, "/G1");
+            H5did2 = _createDataset(H5gid, H5dsid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5O.createH5file: " + err);
+        }
+        assertTrue("TestH5O.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5O.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+        assertTrue("TestH5O.createH5file: H5.H5Gcreate: ",H5gid > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5gid > 0) 
+            try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+        if (H5did2 > 0) 
+            try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5dsid2 > 0) 
+            try {H5.H5Sclose(H5dsid2);} catch (Exception ex) {}
+        if (H5did1 > 0) 
+            try {H5.H5Dclose(H5did1);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+        _deleteFile(FILENAME);
+    }
+    
+    @Test
+    public void testH5OcopyRefsAttr() {
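+        // Write an object reference to DS2 into attribute A1 (the G1 reference
+        // is commented out, so the first 8 bytes stay a null reference), then
+        // copy the root group with H5O_COPY_EXPAND_REFERENCE_FLAG so stored
+        // references are remapped to the copied objects instead of the originals.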
+        int ocp_plist_id = -1;
+        byte rbuf0[]=null , rbuf1[] = null;
+        byte[] dset_data = new byte[16];
+        int attribute_id = -1;
+
+        try{
+            rbuf0 = H5.H5Rcreate(H5fid, "/G1", HDF5Constants.H5R_OBJECT, -1);
+            rbuf1 = H5.H5Rcreate(H5fid, "DS2", HDF5Constants.H5R_OBJECT, -1);
+            //System.arraycopy(rbuf0, 0, dset_data, 0, 8);
+            System.arraycopy(rbuf1, 0, dset_data, 8, 8);
+
+            attribute_id = H5.H5Acreate(H5did2, "A1", HDF5Constants.H5T_STD_REF_OBJ, H5dsid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data);
+            
+            H5.H5Aclose(attribute_id);
+            
+            ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+            H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+            H5.H5Ocopy(H5fid, ".", H5fid, "CPYREF", ocp_plist_id, HDF5Constants.H5P_DEFAULT);        
+        }
+        catch (Exception ex){
+            fail("testH5OcopyRefsAttr: H5Ocopy failed");
+        }
+        try{
+            H5.H5Pclose(ocp_plist_id);
+        }
+        catch (Exception ex){
+        }
+    }
+    
+    @Test
+    public void testH5OcopyRefsDatasettodiffFile() {
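+        // Copy the reference dataset into a second file; with
+        // H5O_COPY_EXPAND_REFERENCE_FLAG the referenced object is copied into
+        // copy.h5 as well, since object references cannot span files.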
+        int ocp_plist_id = -1;
+        byte rbuf1[] = null;
+        byte[] dset_data = new byte[16];
+        int dataset_id = -1;        
+        
+        try{
+            rbuf1 = H5.H5Rcreate(H5fid, "DS2", HDF5Constants.H5R_OBJECT, -1);
+            System.arraycopy(rbuf1, 0, dset_data, 8, 8);
+            
+            dataset_id = H5.H5Dcreate(H5fid, "DSREF",
+                    HDF5Constants.H5T_STD_REF_OBJ, H5dsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, dset_data);
+            H5.H5Dclose(dataset_id);
+            
+            //create new file
+            int H5fid2 = H5.H5Fcreate("copy.h5", HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid2, HDF5Constants.H5F_SCOPE_LOCAL);
+            
+            //create object copy property list id and set the flags.
+            ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+            H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+            
+            //Perform copy function.
+            H5.H5Ocopy(H5fid, ".", H5fid2, "CPYREFD", ocp_plist_id, HDF5Constants.H5P_DEFAULT);
+            
+            //Close file.
+            H5.H5Fclose(H5fid2);
+            
+        }
+        catch (Exception ex){
+            ex.printStackTrace();
+            fail("testH5OcopyRefsDatasettodiffFile: H5Ocopy failed");
+        }
+        try{
+            H5.H5Pclose(ocp_plist_id);
+        }
+        catch (Exception ex){
+        }
+    }
+    
+    @Test
+    public void testH5OcopyRefsDatasettosameFile() {
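+        // Copy the reference dataset within the same file, then read the
+        // references back and check that they still resolve to a dataset (DS2)
+        // and a group (G1).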
+        int ocp_plist_id = -1;
+        byte rbuf0[]=null , rbuf1[] = null;
+        byte[] dset_data = new byte[16];
+        int dataset_id = -1;
+        int[] otype = { 1 };
+        int obj_type = -1;
+        byte[] read_data = new byte[16];
+        int did = -1;
+                
+        try{
+            rbuf0 = H5.H5Rcreate(H5fid, "/G1", HDF5Constants.H5R_OBJECT, -1);
+            rbuf1 = H5.H5Rcreate(H5fid, "DS2", HDF5Constants.H5R_OBJECT, -1);
+            System.arraycopy(rbuf0, 0, dset_data, 0, 8);
+            System.arraycopy(rbuf1, 0, dset_data, 8, 8);
+            
+            //Create a dataset and write object references to it.
+            dataset_id = H5.H5Dcreate(H5fid, "DSREF",
+                    HDF5Constants.H5T_STD_REF_OBJ, H5dsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ,
+                    HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, dset_data);
+            //Close the dataset.
+            H5.H5Dclose(dataset_id);
+
+            ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+            H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+
+            //Perform copy function.
+            try {
+                H5.H5Ocopy(H5fid, "DSREF", H5fid, "CPYREFD", ocp_plist_id, HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Exception ex) {
+                fail("testH5OcopyRefsDatasettosameFile: H5Ocopy failed");
+            }
+
+            //Open the source dataset of the copy and read its references back.
+            try {
+                did = H5.H5Dopen(H5fid, "DSREF", HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+            }
+            //Read the dataset object references in the read_data buffer.
+            H5.H5Dread(did, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL,HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_data);
+            System.arraycopy(read_data, 0, rbuf0, 0, 8);
+            System.arraycopy(read_data, 8, rbuf1, 0, 8);    
+            
+            //Get the type of object the reference points to.
+            obj_type = H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_OBJECT, rbuf1, otype);
+            assertEquals(obj_type, HDF5Constants.H5O_TYPE_DATASET);
+            
+            obj_type = H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_OBJECT, rbuf0, otype);
+            assertEquals(obj_type, HDF5Constants.H5O_TYPE_GROUP);
+                
+            //close the dataset
+            H5.H5Dclose(did);
+        }
+        catch (Exception ex){
+            ex.printStackTrace();
+        }
+        try{
+            H5.H5Pclose(ocp_plist_id);
+        }
+        catch (Exception ex){
+        }
+    }
+    
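+    // An attribute holding a deliberately invalid object reference must not
+    // crash H5Ocopy when reference expansion is requested; any exception from
+    // the setup or the copy is deliberately swallowed.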
+    @Test
+    public void testH5OcopyInvalidRef() {
+        final int _pid_ = HDF5Constants.H5P_DEFAULT;
+
+        try {
+            int sid = H5.H5Screate_simple(1, new long[] {1}, null);
+            int did = H5.H5Dcreate(H5fid, "Dataset_with_invalid_Ref", HDF5Constants.H5T_NATIVE_INT, sid, _pid_, _pid_, _pid_);
+            int aid = H5.H5Acreate(did, "Invalid_Ref", HDF5Constants.H5T_STD_REF_OBJ, sid, _pid_, _pid_);
+            H5.H5Awrite(aid, HDF5Constants.H5T_STD_REF_OBJ, new long[]{-1});
+            H5.H5Dclose(did);
+            H5.H5Aclose(aid);
+            H5.H5Sclose(sid);
+        } catch (Exception ex) {}
+
+        try {
+            int ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+            H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+            try {
+                H5.H5Ocopy(H5fid, "/Dataset_with_invalid_Ref", H5fid, "/Dataset_with_invalid_Ref_cp", ocp_plist_id, _pid_);
+            } finally { H5.H5Pclose(ocp_plist_id); }
+        } catch (Exception ex) {}
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Ocreate.java b/sourceTest/java/test/hdf5lib/TestH5Ocreate.java
new file mode 100644
index 0000000..62a1477
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Ocreate.java
@@ -0,0 +1,489 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.util.ArrayList;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import ncsa.hdf.hdf5lib.callbacks.H5O_iterate_t;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5O_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Ocreate {
+    private static final String H5_EXTFILE = "sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf";
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    int H5fcpl = -1;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did1 = -1;
+    int H5did2 = -1;
+    int H5gcpl = -1;
+    int H5gid = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5O._createDataset: ",did > 0);
+
+        return did;
+    }
+
+    private final int _createGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            H5gcpl = HDF5Constants.H5P_DEFAULT;
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                    H5gcpl, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+        assertTrue("TestH5O._createGroup: ",gid > 0);
+
+        return gid;
+    }
+
+    private final void _createHardLink(int fid, int cid, String curname, int did, String dstname, int lcpl, int lapl) {
+        boolean link_exists = false;
+        try {
+            H5.H5Lcreate_hard(cid, curname, did, dstname, lcpl, lapl);
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            link_exists = H5.H5Lexists(did, dstname, lapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lcreate_hard: " + err);
+        }
+        assertTrue("TestH5O._createHardLink ", link_exists);
+    }
+
+    private final void _createSoftLink(int fid, String curname, int did, String dstname, int lcpl, int lapl) {
+        boolean link_exists = false;
+        try {
+            H5.H5Lcreate_soft(curname, did, dstname, lcpl, lapl);
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            link_exists = H5.H5Lexists(did, dstname, lapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lcreate_soft: " + err);
+        }
+        assertTrue("TestH5O._createSoftLink ", link_exists);
+    }
+
+    private final void _createExternalLink(int fid, String ext_filename, String curname, int did, String dstname, int lcpl, int lapl) {
+        boolean link_exists = false;
+        try {
+            H5.H5Lcreate_external(ext_filename, curname, did, dstname, lcpl, lapl);
+            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            link_exists = H5.H5Lexists(did, dstname, lapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Lcreate_external: " + err);
+        }
+        assertTrue("TestH5O._createExternalLink ", link_exists);
+    }
+
+    @Before
+    public void createH5file()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+        try {
+            H5fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
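+            // Track and index link creation order so the H5_INDEX_CRT_ORDER
+            // queries in the tests below can succeed.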
+            H5.H5Pset_link_creation_order(H5fcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    H5fcpl, HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5did1 = _createDataset(H5fid, H5dsid, "DS1", HDF5Constants.H5P_DEFAULT);
+            H5gid = _createGroup(H5fid, "/G1");
+            H5did2 = _createDataset(H5gid, H5dsid, "DS2", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5O.createH5file: " + err);
+        }
+        assertTrue("TestH5O.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5O.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+        assertTrue("TestH5O.createH5file: H5.H5Gcreate: ",H5gid > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5gid > 0) 
+            try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+        if (H5gcpl > 0) 
+            try {H5.H5Pclose(H5gcpl);} catch (Exception ex) {}
+        if (H5did2 > 0) 
+            try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did1 > 0) 
+            try {H5.H5Dclose(H5did1);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        if (H5fcpl > 0) 
+            try {H5.H5Pclose(H5fcpl);} catch (Exception ex) {}
+
+        _deleteFile(H5_FILE);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ocopy_cur_not_exists() throws Throwable {
+        H5.H5Ocopy(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Ocopy() {
+        try {
+            H5.H5Ocopy(H5fid, "DS1", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+            boolean link_exists = H5.H5Lexists(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Ocopy:H5Lexists ",link_exists);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ocopy: " + err);
+        }
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ocopy_dst_link_exists() throws Throwable {
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Ocopy(H5fid, "CPY1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Oget_info_by_idx_n0_create() {
+        H5O_info_t obj_info = null;
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_idx_n0:H5Pget_link_creation_order " + err);
+        }
+        try {
+            obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_idx: " + err);
+        }
+        assertFalse("H5Oget_info_by_idx ", obj_info==null);
+        assertTrue("H5Oget_info_by_idx link type", obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+    }
+
+    @Test
+    public void testH5Oget_info_by_idx_n1_create() {
+        H5O_info_t obj_info = null;
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_idx_n1:H5Pget_link_creation_order " + err);
+        }
+        try {
+            obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 1, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_idx: " + err);
+        }
+        assertFalse("H5Oget_info_by_idx ", obj_info==null);
+        assertTrue("H5Oget_info_by_idx link type", obj_info.type==HDF5Constants.H5O_TYPE_GROUP);
+    }
+
+    @Test
+    public void testH5Oget_info_softlink() {
+        H5O_info_t obj_info = null;
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            obj_info = H5.H5Oget_info_by_name(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ", obj_info==null);
+        assertTrue("H5Oget_info link type", obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        assertTrue("Link Address ", obj_info.addr>0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_softlink_dangle() throws Throwable {
+        _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        H5.H5Oget_info_by_name(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test
+    public void testH5Oget_info_externallink() {
+        H5O_info_t obj_info = null;
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        try {
+            obj_info = H5.H5Oget_info_by_name(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        assertFalse("H5Oget_info ", obj_info==null);
+        assertTrue("H5Oget_info link type", obj_info.type==HDF5Constants.H5O_TYPE_NAMED_DATATYPE);
+        assertTrue("Link Address ", obj_info.addr>0);
+    }
+
+    @Test
+    public void testH5Olink() {
+        int oid = -1;
+        H5O_info_t obj_info = null;
+        H5O_info_t dst_obj_info = null;
+        try {
+            oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+            obj_info = H5.H5Oget_info(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info: " + err);
+        }
+        try {
+            H5.H5Olink(oid, H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Olink: " + err);
+        }
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+
+        assertFalse("H5Oget_info ", obj_info==null);
+        assertTrue("H5Oget_info object type", obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+        
+        try {
+            dst_obj_info = H5.H5Oget_info_by_name(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_info_by_name: " + err);
+        }
+        assertFalse("H5Oget_info ", dst_obj_info==null);
+        assertTrue("H5Oget_info object type", dst_obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+    }
+
+    @Test
+    public void testH5Ovisit_create() {
+        try {
+            int order = H5.H5Pget_link_creation_order(H5fcpl);
+            assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ovisit_create:H5Pget_link_creation_order " + err);
+        }
+        
+        _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
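+        // H5Ovisit reports each object once: the hard link CPY1 aliases G1/DS2,
+        // and soft/external links are not followed, so only ".", DS1, G1 and
+        // G1/DS2 are expected below.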
+
+        class idata {
+            public String link_name = null;
+            public int link_type = -1;
+            idata(String name, int type) {
+                this.link_name = name;
+                this.link_type = type;
+            }
+        }
+        class H5O_iter_data implements H5O_iterate_t {
+            public ArrayList<idata> iterdata = new ArrayList<idata>();
+        }
+        H5O_iterate_t iter_data = new H5O_iter_data();
+        class H5O_iter_callback implements H5O_iterate_cb {
+            public int callback(int group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+                idata id = new idata(name, info.type);
+                ((H5O_iter_data)op_data).iterdata.add(id);
+                return 0;
+            }
+        }
+        H5O_iterate_cb iter_cb = new H5O_iter_callback();
+        try {
+            H5.H5Ovisit(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Ovisit: " + err);
+        }
+        assertFalse("H5Ovisit ", ((H5O_iter_data)iter_data).iterdata.isEmpty());
+        assertTrue("H5Ovisit "+((H5O_iter_data)iter_data).iterdata.size(), ((H5O_iter_data)iter_data).iterdata.size()==4);
+        assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(0)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase(".")==0);
+        assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(1)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DS1")==0);
+        assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(2)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1")==0);
+        assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(3)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1/DS2")==0);
+    }
+
+    @Test
+    public void testH5Ocomment() {
+        int oid = -1;
+        String obj_comment = null;
+        try {
+            oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+            H5.H5Oset_comment(oid, "Test Comment");
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oset_comment: " + err);
+        }
+        try {
+            obj_comment = H5.H5Oget_comment(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_comment: " + err);
+        }
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+        assertFalse("H5Oget_comment: ", obj_comment==null);
+        assertTrue("H5Oget_comment: ", obj_comment.compareTo("Test Comment")==0);
+    }
+
+    @Test
+    public void testH5Ocomment_clear() {
+        int oid = -1;
+        String obj_comment = null;
+        try {
+            oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+            H5.H5Oset_comment(oid, "Test Comment");
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oset_comment: " + err);
+        }
+        try {
+            obj_comment = H5.H5Oget_comment(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_comment: " + err);
+        }
+        assertFalse("H5Oget_comment: ", obj_comment==null);
+        assertTrue("H5Oget_comment: ", obj_comment.compareTo("Test Comment")==0);
+        try {
+            H5.H5Oset_comment(oid, null);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oset_comment: " + err);
+        }
+        try {
+            obj_comment = H5.H5Oget_comment(oid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_comment: " + err);
+        }
+        try {H5.H5Oclose(oid);} catch (Exception ex) {}
+        assertTrue("H5Oget_comment: ", obj_comment==null);
+    }
+
+    @Test
+    public void testH5Ocomment_by_name() {
+        String obj_comment = null;
+        try {
+            H5.H5Oset_comment_by_name(H5fid, "DS1", "Test Comment", HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oset_comment_by_name: " + err);
+        }
+        try {
+            obj_comment = H5.H5Oget_comment_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_comment_by_name: " + err);
+        }
+        assertFalse("H5Oget_comment_by_name: ", obj_comment==null);
+        assertTrue("H5Oget_comment_by_name: ", obj_comment.compareTo("Test Comment")==0);
+    }
+
+    @Test
+    public void testH5Ocomment_by_name_clear() {
+        String obj_comment = null;
+        try {
+            H5.H5Oset_comment_by_name(H5fid, "DS1", "Test Comment", HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oset_comment_by_name: " + err);
+        }
+        try {
+            obj_comment = H5.H5Oget_comment_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_comment_by_name: " + err);
+        }
+        assertFalse("H5Oget_comment_by_name: ", obj_comment==null);
+        assertTrue("H5Oget_comment_by_name: ", obj_comment.compareTo("Test Comment")==0);
+        try {
+            H5.H5Oset_comment_by_name(H5fid, "DS1", null, HDF5Constants.H5P_DEFAULT);
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oset_comment_by_name: " + err);
+        }
+        try {
+            obj_comment = H5.H5Oget_comment_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Oget_comment_by_name: " + err);
+        }
+        assertTrue("H5Oget_comment_by_name: ", obj_comment==null);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Oparams.java b/sourceTest/java/test/hdf5lib/TestH5Oparams.java
new file mode 100644
index 0000000..79f3150
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Oparams.java
@@ -0,0 +1,126 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Oparams {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
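+    // Parameter-validation tests: a null name maps to NullPointerException and
+    // an invalid identifier to HDF5LibraryException in the JNI wrapper.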
+    @Test(expected = NullPointerException.class)
+    public void testH5Oopen_null() throws Throwable {
+        H5.H5Oopen(-1, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oopen_invalid() throws Throwable {
+        H5.H5Oopen(-1, "Bogus", 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Ocopy_invalid() throws Throwable {
+        H5.H5Ocopy(-1, "Bogus", -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ocopy_null_current() throws Throwable {
+        H5.H5Ocopy(-1, null, 0, "Bogus", 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ocopy_null_dest() throws Throwable {
+        H5.H5Ocopy(-1, "Bogus", 0, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_invalid() throws Throwable {
+        H5.H5Oget_info(-1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Oget_info_by_name_null() throws Throwable {
+        H5.H5Oget_info_by_name(-1, null, HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_name_invalid() throws Throwable {
+        H5.H5Oget_info_by_name(-1, "/testH5Gcreate", HDF5Constants.H5P_DEFAULT);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_info_by_idx_invalid() throws Throwable {
+        H5.H5Oget_info_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Oget_info_by_idx_null() throws Throwable {
+        H5.H5Oget_info_by_idx(-1, null, 0, 0, 0L, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Olink_invalid() throws Throwable {
+        H5.H5Olink(-1, -1, "Bogus", -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Olink_null_dest() throws Throwable {
+        H5.H5Olink(-1, 0, null, 0, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ovisit_null() throws Throwable {
+        H5.H5Ovisit(-1, -1, -1, null, null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ovisit_by_name_nullname() throws Throwable {
+        H5.H5Ovisit_by_name(-1, null, -1, -1, null, null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Ovisit_by_name_null() throws Throwable {
+        H5.H5Ovisit_by_name(-1, "Bogus", -1, -1, null, null, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    @SuppressWarnings("deprecation")
+    public void testH5Oset_comment_invalid() throws Throwable {
+        H5.H5Oset_comment(-1, "Bogus");
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_comment_invalid() throws Throwable {
+        H5.H5Oget_comment(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    @SuppressWarnings("deprecation")
+    public void testH5Oset_comment_by_name_invalid() throws Throwable {
+        H5.H5Oset_comment_by_name(-1, "Bogus", null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    @SuppressWarnings("deprecation")
+    public void testH5Oset_comment_by_name_null() throws Throwable {
+        H5.H5Oset_comment_by_name(-1, null, null, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Oget_comment_by_name_invalid() throws Throwable {
+        H5.H5Oget_comment_by_name(-1, "Bogus", -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Oget_comment_by_name_null() throws Throwable {
+        H5.H5Oget_comment_by_name(-1, null, -1);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5P.java b/sourceTest/java/test/hdf5lib/TestH5P.java
new file mode 100644
index 0000000..d0884f8
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5P.java
@@ -0,0 +1,1162 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5P {
+    
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    long[] H5dims = { DIM_X, DIM_Y };
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did = -1;
+    int lapl_id = -1;
+    int fapl_id = -1;
+    int fcpl_id = -1;
+    int ocpl_id = -1;
+    int ocp_plist_id = -1;
+    int lcpl_id = -1;
+    int plapl_id = -1;
+    int plist_id = -1;
+    int gapl_id = -1;
+    int gcpl_id = -1;
+    int acpl_id = -1;
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createDataset: ", did > 0);
+
+        return did;
+    }
+
+    private final void _createH5File(int fcpl, int fapl) {
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    fcpl, fapl);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ", H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+        assertTrue("TestH5D.createH5file: _createDataset: ", H5did > 0);
+
+        try {
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+        }
+    }
+
+    public void deleteH5file() throws HDF5LibraryException {
+        _deleteFile(H5_FILE);
+    }
+
+    @Before
+    public void createH5fileProperties()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
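+            // Create one property list for each property-list class exercised below.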
+            lapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+            fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+            fcpl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+            ocpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+            ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+            lcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+            plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+            plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+            gapl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_ACCESS);
+            gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+            acpl_id = H5.H5Pcreate(HDF5Constants.H5P_ATTRIBUTE_CREATE);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue(lapl_id > 0);
+        assertTrue(fapl_id > 0);
+        assertTrue(fcpl_id > 0);
+        assertTrue(ocpl_id > 0);
+        assertTrue(ocp_plist_id > 0);
+        assertTrue(lcpl_id > 0);
+        assertTrue(plapl_id>0);
+        assertTrue(plist_id > 0);
+        assertTrue(gapl_id > 0);
+        assertTrue(gcpl_id >0);
+        assertTrue(acpl_id >0);
+    }
+
+    @After
+    public void deleteH5fileProperties() throws HDF5LibraryException {
+        if (lapl_id >0)
+            try {H5.H5Pclose(lapl_id);} catch (Exception ex) {}
+        if (fapl_id >0)
+            try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+        if (fcpl_id >0)
+            try {H5.H5Pclose(fcpl_id);} catch (Exception ex) {}
+        if (ocpl_id >0)
+            try {H5.H5Pclose(ocpl_id);} catch (Exception ex) {}
+        if (ocp_plist_id >0)
+            try {H5.H5Pclose(ocp_plist_id);} catch (Exception ex) {}
+        if (lcpl_id >0)
+            try {H5.H5Pclose(lcpl_id);} catch (Exception ex) {}
+        if (plapl_id >0)
+            try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+        if (plist_id >0)
+            try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+        if (gapl_id >0)
+            try {H5.H5Pclose(gapl_id);} catch (Exception ex) {}
+        if (gcpl_id >0)
+            try {H5.H5Pclose(gcpl_id);} catch (Exception ex) {}
+        if (acpl_id >0)
+            try {H5.H5Pclose(acpl_id);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did > 0) 
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}       
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+    }
+
+    @Test
+    public void testH5Pget_nlinks() {
+        long nlinks = -1;
+        try {
+            nlinks = (long) H5.H5Pget_nlinks(lapl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Pget_nlinks: " + err);
+        }
+        assertTrue("testH5Pget_nlinks", nlinks > 0);
+        // Check the default value of nlinks.
+        assertEquals(nlinks, 16L);
+    }
+
+    @Test
+    public void testH5Pset_nlinks() {
+        long nlinks = 20;
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_nlinks(lapl_id, nlinks);
+            nlinks = (long) H5.H5Pget_nlinks(lapl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Pset_nlinks: " + err);
+        }
+        assertTrue("testH5Pset_nlinks", ret_val >= 0);
+        // Check the value of nlinks retrieved from H5Pget_nlinks function.
+        assertEquals(nlinks, 20L); 
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_libver_bounds_invalidlow() throws Throwable {
+        H5.H5Pset_libver_bounds(fapl_id, 5, HDF5Constants.H5F_LIBVER_LATEST);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_libver_bounds_invalidhigh() throws Throwable {
+        H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, 5);
+    }
+    
+    @Test
+    public void testH5Pget_link_creation_order() {
+        int crt_order_flags = 0;
+        try {
+            crt_order_flags = H5.H5Pget_link_creation_order(fcpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_link_creation_order: " + err);
+        }
+        assertTrue("testH5Pget_link_creation_order", crt_order_flags >= 0);
+    }
+
+    @Test
+    public void testH5Pset_link_creation_order_trackedPLUSindexed() {
+        int ret_val = -1;
+        int crt_order_flags = HDF5Constants.H5P_CRT_ORDER_TRACKED + HDF5Constants.H5P_CRT_ORDER_INDEXED;
+        int crtorderflags = 0;
+
+        try {
+            ret_val = H5.H5Pset_link_creation_order(fcpl_id, crt_order_flags);
+            crtorderflags = H5.H5Pget_link_creation_order(fcpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_link_creation_order: " + err);
+        } 
+        assertTrue("testH5Pset_link_creation_order_trackedPLUSindexed",ret_val >= 0);
+        assertEquals(crt_order_flags, crtorderflags);
+    }
+    
+    @Test
+    public void testH5Pset_link_creation_order_tracked() {
+        int ret_val = -1;
+        int crtorderflags = 0;
+
+        try {
+            ret_val = H5.H5Pset_link_creation_order(fcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED);
+            crtorderflags = H5.H5Pget_link_creation_order(fcpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_link_creation_order: " + err);
+        } 
+        assertTrue("testH5Pset_link_creation_order_tracked",ret_val >= 0);
+        assertEquals(HDF5Constants.H5P_CRT_ORDER_TRACKED, crtorderflags);
+    }
+    
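+    // H5P_CRT_ORDER_INDEXED is only valid in combination with
+    // H5P_CRT_ORDER_TRACKED, so setting it alone must fail.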
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_link_creation_order_invalidvalue() throws Throwable {
+        H5.H5Pset_link_creation_order(fcpl_id, HDF5Constants.H5P_CRT_ORDER_INDEXED);
+    }
+    
+    @Test
+    public void testH5Pget_attr_creation_order() {
+        int crt_order_flags = 0;
+
+        try {
+            crt_order_flags = H5.H5Pget_attr_creation_order(ocpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_attr_creation_order: " + err);
+        } 
+        assertTrue("testH5Pget_attr_creation_order", crt_order_flags >= 0);
+    }
+    
+    @Test
+    public void testH5Pset_attr_creation_order_trackedPLUSindexed() {
+        int ret_val = -1;
+        int crt_order_flags = HDF5Constants.H5P_CRT_ORDER_TRACKED + HDF5Constants.H5P_CRT_ORDER_INDEXED;
+        int crtorderflags = 0;
+
+        try {
+            ret_val = H5.H5Pset_attr_creation_order(ocpl_id, crt_order_flags);
+            crtorderflags = H5.H5Pget_attr_creation_order(ocpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_attr_creation_order: " + err);
+        } 
+        assertTrue("testH5Pset_attr_creation_order_trackedPLUSindexed", ret_val >= 0);
+        assertEquals(crt_order_flags, crtorderflags);
+    }
+    
+    @Test
+    public void testH5Pset_attr_creation_order_tracked() {
+        int ret_val = -1;
+        int crtorderflags = 0;
+
+        try {
+            ret_val = H5.H5Pset_attr_creation_order(ocpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED);
+            crtorderflags = H5.H5Pget_attr_creation_order(ocpl_id);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_attr_creation_order: " + err);
+        } 
+        assertTrue("testH5Pset_attr_creation_order_tracked", ret_val >= 0);
+        assertEquals(HDF5Constants.H5P_CRT_ORDER_TRACKED, crtorderflags);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_attr_creation_order_invalidvalue() throws Throwable {
+        H5.H5Pset_attr_creation_order(ocpl_id, HDF5Constants.H5P_CRT_ORDER_INDEXED);
+    }
+    
+    @Test
+    public void testH5Pset_copy_object() {
+    
+        int cpy_option = -1;
+
+        try {
+            H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_SHALLOW_HIERARCHY_FLAG);
+            cpy_option = H5.H5Pget_copy_object(ocp_plist_id);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_copy_object: " + err);
+        } 
+        assertEquals(HDF5Constants.H5O_COPY_SHALLOW_HIERARCHY_FLAG, cpy_option);
+        
+        try {
+            H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+            cpy_option = H5.H5Pget_copy_object(ocp_plist_id);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_copy_object: " + err);
+        } 
+        assertEquals(HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG, cpy_option);        
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_copy_object_invalidobject() throws Throwable {
+        H5.H5Pset_copy_object(HDF5Constants.H5P_DEFAULT, HDF5Constants.H5O_COPY_SHALLOW_HIERARCHY_FLAG);
+    }
+
+    @Test
+    public void testH5Pset_create_intermediate_group() {
+    
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_create_intermediate_group(lcpl_id, true);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_create_intermediate_group: " + err);
+        } 
+        assertTrue(ret_val>=0);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_create_intermediate_group_invalidobject() throws Throwable {
+        H5.H5Pset_create_intermediate_group(ocp_plist_id, true);
+    }
+    
+    @Test
+    public void testH5Pget_create_intermediate_group() {
+        boolean flag = false;
+        try {
+            H5.H5Pset_create_intermediate_group(lcpl_id, true);
+            flag = H5.H5Pget_create_intermediate_group(lcpl_id);    
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_create_intermediate_group: " + err);
+        }
+        assertEquals(true, flag);
+    }
+    
+    @Test
+    public void testH5Pget_create_intermediate_group_notcreated() {
+        boolean flag = true;
+        try {
+            flag = H5.H5Pget_create_intermediate_group(lcpl_id);    
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_create_intermediate_group_notcreated: " + err);
+        }
+        assertEquals(false, flag);
+    }
+    
+    @Test
+    public void testH5Pset_data_transform() {
+        
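+        // Element-wise transform (Fahrenheit to Celsius) applied during I/O on
+        // this dataset-transfer property list.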
+        String expression = "(5/9.0)*(x-32)";
+        int ret_val = -1;
+
+        try {
+            ret_val= H5.H5Pset_data_transform(plist_id, expression);    
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_data_transform: " + err);
+        }
+        assertTrue(ret_val>=0);    
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pset_data_transform_NullExpression() throws Throwable {
+        H5.H5Pset_data_transform(plist_id, null);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_data_transform_InvalidExpression1() throws Throwable {
+        H5.H5Pset_data_transform(plist_id, "");
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_data_transform_InvalidExpression2() throws Throwable {
+        H5.H5Pset_data_transform(plist_id, "hello");
+    }
+    
+    @Test
+    public void testH5Pget_data_transform() {
+        
+        String expression = "(5/9.0)*(x-32)";
+        String [] express = {""};
+        long express_size = 0;
+        long size = 20;
+
+        try {
+            H5.H5Pset_data_transform(plist_id, expression);    
+            express_size = H5.H5Pget_data_transform(plist_id, express, size);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_data_transform: " + err);
+        }
+        assertTrue(express_size>=0);
+        assertTrue("The data transform expression: ", expression.equals(express[0]));
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pget_data_transform_ExpressionNotSet() throws Throwable {
+        String [] express = {""};
+        H5.H5Pget_data_transform(plist_id, express, 20);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pget_data_transform_IllegalSize() throws Throwable {
+        String [] express = {""};
+        H5.H5Pset_data_transform(plist_id, "(5/9.0)*(x-32)");
+        H5.H5Pget_data_transform(plist_id, express, 0);
+    }
+    
+    @Test
+    public void testH5Pget_elink_acc_flags() {
+        
+        int get_flags = -1;
+        try {
+            get_flags = H5.H5Pget_elink_acc_flags(gapl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_elink_acc_flags: " + err);
+        }
+        assertTrue("H5Pget_elink_acc_flags", get_flags >= 0);
+        assertEquals(HDF5Constants.H5F_ACC_DEFAULT, get_flags);
+    }
+    
+    @Test
+    public void testH5Pset_elink_acc_flags() {
+        
+        int get_flags = -1;
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_elink_acc_flags(lapl_id, HDF5Constants.H5F_ACC_RDWR);
+            get_flags = H5.H5Pget_elink_acc_flags(lapl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_elink_acc_flags: " + err);
+        }
+        assertTrue("H5Pset_elink_acc_flags", ret_val >= 0);
+        assertEquals(HDF5Constants.H5F_ACC_RDWR, get_flags);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_elink_acc_flags_InvalidFlag1() throws Throwable {
+        H5.H5Pset_elink_acc_flags(lapl_id, HDF5Constants.H5F_ACC_TRUNC);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_elink_acc_flags_InvalidFlag2() throws Throwable {
+        H5.H5Pset_elink_acc_flags(lapl_id, -1);
+    }
+    
+    @Test
+    public void testH5Pset_link_phase_change() {
+        
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_link_phase_change(fcpl_id , 2, 2);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_link_phase_change: " + err);
+        }
+        assertTrue("H5Pset_link_phase_change", ret_val >= 0);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_link_phase_change_Highmax_Compact() throws Throwable {
+        H5.H5Pset_link_phase_change(fcpl_id , 70000000, 3);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_link_phase_change_max_compactLESSTHANmin_dense() throws Throwable {
+        H5.H5Pset_link_phase_change(fcpl_id , 5, 6);
+    }
+    
+    @Test
+    public void testH5Pget_link_phase_change() {
+        int ret_val = -1;
+        int[] links = new int[2];
+
+        try {
+            ret_val = H5.H5Pget_link_phase_change(fcpl_id, links); 
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_link_phase_change: " + err);
+        }
+        assertTrue("testH5Pget_link_phase_change", ret_val >= 0);
+        assertEquals("Default value of maximum compact storage", 8, links[0]);
+        assertEquals("Default value of minimum dense storage", 6, links[1]);
+    }
+    
+    @Test
+    public void testH5Pget_link_phase_change_EqualsSet() {
+        int[] links = new int[2];
+        try {
+            H5.H5Pset_link_phase_change(fcpl_id , 10, 7);
+            H5.H5Pget_link_phase_change(fcpl_id, links); 
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_link_phase_change_EqualsSet: " + err);
+        }
+        assertEquals("Value of maximum compact storage set", 10, links[0]);
+        assertEquals("Value of minimum dense storage set", 7, links[1]);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_link_phase_change_Null() throws Throwable {
+        H5.H5Pget_link_phase_change(fcpl_id, null); 
+    }
+    
+    @Test
+    public void testH5Pget_attr_phase_change() {
+        int ret_val = -1;
+        int[] attributes = new int[2];
+
+        try {
+            ret_val = H5.H5Pget_attr_phase_change(ocpl_id, attributes); 
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_attr_phase_change: " + err);
+        }
+        assertTrue("testH5Pget_attr_phase_change", ret_val >= 0);
+        assertEquals("Default value of the max. no. of attributes stored in compact storage", 8, attributes[0]);
+        assertEquals("Default value of the min. no. of attributes stored in dense storage", 6, attributes[1]);
+    }
+
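+    // A file can keep up to H5O_SHMESG_MAX_NINDEXES (8) indexes of shared
+    // object header messages; each index is stored as a flat list until it
+    // outgrows the list-to-B-tree thresholds exercised below.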
+    @Test
+    public void testH5Pget_shared_mesg_phase_change() {
+        int ret_val = -1;
+        int[] size = new int[2];
+
+        try {
+            ret_val = H5.H5Pget_shared_mesg_phase_change(fcpl_id, size); 
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_shared_mesg_phase_change: " + err);
+        }
+        assertTrue("testH5Pget_shared_mesg_phase_change", ret_val >= 0);
+    }
+
+    @Test
+    public void testH5Pget_shared_mesg_phase_change_EqualsSET() {
+        int[] size = new int[2];
+
+        try {
+            H5.H5Pset_shared_mesg_phase_change(fcpl_id,50, 40);
+            H5.H5Pget_shared_mesg_phase_change(fcpl_id, size); 
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_shared_mesg_phase_change_EqualsSET: " + err);
+        }
+        assertEquals("Value of maximum list set", 50, size[0]);
+        assertEquals("Value of minimum btree set", 40, size[1]);
+    }
+
+    @Test
+    public void testH5Pset_shared_mesg_phase_change() {
+
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_shared_mesg_phase_change(fcpl_id,2, 1);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_shared_mesg_phase_change: " + err);
+        }
+        assertTrue("H5Pset_shared_mesg_phase_change", ret_val >= 0);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_shared_mesg_phase_change_HighMaxlistValue() throws Throwable {
+        H5.H5Pset_shared_mesg_phase_change(fcpl_id, 5001, 4000);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_shared_mesg_phase_change_HighMinbtreeValue() throws Throwable {
+        H5.H5Pset_shared_mesg_phase_change(fcpl_id, 5000, 5001);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_shared_mesg_phase_change_MinbtreeGreaterThanMaxlist() throws Throwable {
+        H5.H5Pset_shared_mesg_phase_change(fcpl_id, 3, 7);
+    }
+
+    @Test
+    public void testH5Pget_shared_mesg_nindexes() {
+
+        int nindexes = -1;
+        try {
+            nindexes = H5.H5Pget_shared_mesg_nindexes(fcpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_shared_mesg_nindexes: " + err);
+        }
+        assertTrue("H5Pget_shared_mesg_nindexes", nindexes >= 0);
+    }
+
+    @Test
+    public void testH5Pset_shared_mesg_nindexes() {
+
+        int nindexes = -1;
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_shared_mesg_nindexes(fcpl_id, 7);
+            nindexes = H5.H5Pget_shared_mesg_nindexes(fcpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_shared_mesg_nindexes: " + err);
+        }
+        assertTrue("H5Pset_shared_mesg_nindexes", ret_val >= 0);
+        assertEquals("Value of nindexes is equal to value set",7 ,nindexes);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_shared_mesg_nindexes_InvalidHIGHnindexes() throws Throwable {
+        H5.H5Pset_shared_mesg_nindexes(fcpl_id, 9);
+    }
+
+    @Test
+    public void testH5Pset_shared_mesg_index() {
+
+        int ret_val = -1;
+        try {
+            H5.H5Pset_shared_mesg_nindexes(fcpl_id, 2);
+            ret_val = H5.H5Pset_shared_mesg_index(fcpl_id, 0,HDF5Constants.H5O_SHMESG_ATTR_FLAG, 10);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_shared_mesg_index: " + err);
+        }
+        assertTrue("H5Pset_shared_mesg_index", ret_val >= 0);    
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_shared_mesg_index_Invalid_indexnum() throws Throwable {
+        H5.H5Pset_shared_mesg_index(fcpl_id, 2,HDF5Constants.H5O_SHMESG_ATTR_FLAG, 10);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_shared_mesg_index_InvalidFlag() throws Throwable {
+        H5.H5Pset_shared_mesg_nindexes(fcpl_id, 7);
+        H5.H5Pset_shared_mesg_index(fcpl_id, 2,HDF5Constants.H5O_SHMESG_ALL_FLAG + 1, 10);
+    }
+
+    @Test
+    public void testH5Pget_shared_mesg_index() {
+
+        int ret_val = -1;
+        int[] mesg_info = new int[2];
+        try {
+            H5.H5Pset_shared_mesg_nindexes(fcpl_id, 2);
+            H5.H5Pset_shared_mesg_index(fcpl_id, 0,HDF5Constants.H5O_SHMESG_ATTR_FLAG, 10);
+            ret_val = H5.H5Pget_shared_mesg_index(fcpl_id, 0, mesg_info);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_shared_mesg_index: " + err);
+        }
+        assertTrue("H5Pget_shared_mesg_index", ret_val >= 0);    
+        assertEquals("Type of message", HDF5Constants.H5O_SHMESG_ATTR_FLAG, mesg_info[0]);
+        assertEquals("minimum message size", 10, mesg_info[1]);    
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pget_shared_mesg_index_Invalid_indexnum() throws Throwable {
+        int[] mesg_info = new int[2];
+        H5.H5Pget_shared_mesg_index(fcpl_id, 0, mesg_info);
+    }
+    
+    @Test
+    public void testH5Pset_local_heap_size_hint() {
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_local_heap_size_hint(gcpl_id, 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_local_heap_size_hint: " + err);
+        }
+        assertTrue("H5Pset_local_heap_size_hint", ret_val >= 0);    
+    }
+
+    @Test
+    public void testH5Pget_local_heap_size_hint() {
+        long size_hint = -1;
+        try {
+            size_hint = H5.H5Pget_local_heap_size_hint(gcpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_local_heap_size_hint: " + err);
+        }
+        assertTrue("H5Pget_local_heap_size_hint", size_hint >= 0);    
+    }
+    
+    @Test
+    public void testH5Pset_nbit() {
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_nbit(ocpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_nbit: " + err);
+        }
+        assertTrue("H5Pset_nbit", ret_val >= 0);    
+    }
+    
+    @Test
+    public void testH5Pset_scaleoffset() {
+        int ret_val = -1;
+        int scale_type = HDF5Constants.H5Z_SO_FLOAT_DSCALE;
+        int scale_factor = HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT;
+        try {
+            ret_val = H5.H5Pset_scaleoffset(ocpl_id, scale_type, scale_factor);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_scaleoffset: " + err);
+        }
+        assertTrue("H5Pset_scaleoffset", ret_val >= 0);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_scaleoffset_Invalidscale_type() throws Throwable {
+        H5.H5Pset_scaleoffset(ocpl_id, 3, 1);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_scaleoffset_Invalidscale_factor() throws Throwable {
+        H5.H5Pset_scaleoffset(ocpl_id, HDF5Constants.H5Z_SO_INT, -1);
+    }
+    
+    @Test
+    public void testH5Pset_est_link_info() {
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_est_link_info(gcpl_id, 0,10);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_est_link_info: " + err);
+        }
+        assertTrue("H5Pset_est_link_info", ret_val >= 0);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Pset_est_link_info_InvalidValues() throws Throwable {
+        H5.H5Pset_est_link_info(gcpl_id, 100000,10);
+    }
+    
+    @Test
+    public void testH5Pget_est_link_info() {
+        int ret_val = -1;
+        int[] link_info = new int[2];
+        try {
+            ret_val = H5.H5Pget_est_link_info(gcpl_id, link_info);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_est_link_info: " + err);
+        }
+        assertTrue("H5Pget_est_link_info", ret_val >= 0);    
+    }
+    
+    @Test
+    public void testH5Pset_elink_prefix() {
+        int ret_val = -1;
+        String prefix = "tmp";
+        try {
+            ret_val = H5.H5Pset_elink_prefix(plapl_id, prefix);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_est_link_info: " + err);
+        }
+        assertTrue("H5Pset_elink_prefix", ret_val >= 0);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pset_elink_prefix_null() throws Throwable {
+        H5.H5Pset_elink_prefix(plapl_id, null);
+    }
+    
+    @Test
+    public void testH5Pget_elink_prefix() {
+        String prefix = "tmp";
+        String[] pre = {""};
+        long prefix_size = 0;
+
+        try {
+            H5.H5Pset_elink_prefix(plapl_id, prefix);
+            prefix_size = H5.H5Pget_elink_prefix(plapl_id, pre);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_elink_prefix: " + err);
+        }
+        assertTrue(prefix_size>=0);
+        assertTrue("The prefix: ", prefix.equals(pre[0]));
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_elink_prefix_null() throws Throwable {
+        H5.H5Pget_elink_prefix(plapl_id, null);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_version_null() throws Throwable {
+        H5.H5Pget_version(fcpl_id, null);
+    }
+    
+    @Test
+    public void testH5Pget_version() {
+        int[] version_info = {255,255,255,255};
+
+        try {
+            _createH5File(fcpl_id, fapl_id);
+            H5.H5Pget_version(fcpl_id, version_info);
+            deleteH5file();
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_version: " + err);
+        }
+        assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+        assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+        assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+        assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_userblock_null() throws Throwable {
+        H5.H5Pget_userblock(fcpl_id, null);
+    }
+    
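+    // The userblock reserves space for arbitrary user data at the start of
+    // the file; per the HDF5 docs its size must be 0 or a power of two of at
+    // least 512 bytes, so the 1024 used below is valid.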
+    @Test
+    public void testH5P_userblock() {
+        int[] version_info = {255,255,255,255};
+        long[] size = {0};
+
+        try {
+            H5.H5Pset_userblock(fcpl_id, 1024);
+            _createH5File(fcpl_id, fapl_id);
+
+            /* Close FCPL */
+            H5.H5Pclose(fcpl_id);
+
+            /* Get the file's creation property list */
+            fcpl_id =  H5.H5Fget_create_plist(H5fid);
+
+            /* Get the file's version information */
+            H5.H5Pget_version(fcpl_id, version_info);
+            H5.H5Pget_userblock(fcpl_id, size);
+            deleteH5file();
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_userblock: " + err);
+        }
+        assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+        assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+        assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+        assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+        assertTrue("user block size: "+size[0], size[0] == 1024);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_sizes_null() throws Throwable {
+        H5.H5Pget_sizes(fcpl_id, null);
+    }
+    
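+    // H5Pset_sizes controls how many bytes the file format uses for
+    // addresses (sizeof_addr) and object lengths (sizeof_size); per the HDF5
+    // docs the valid values are the powers of two from 2 to 16.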
+    @Test
+    public void testH5P_sizes() {
+        int[] version_info = {255,255,255,255};
+        long[] size = {0,0};
+
+        try {
+            H5.H5Pset_sizes(fcpl_id, 4, 8);
+            _createH5File(fcpl_id, fapl_id);
+
+            /* Close FCPL */
+            H5.H5Pclose(fcpl_id);
+
+            /* Get the file's creation property list */
+            fcpl_id =  H5.H5Fget_create_plist(H5fid);
+
+            /* Get the file's version information */
+            H5.H5Pget_version(fcpl_id, version_info);
+            H5.H5Pget_sizes(fcpl_id, size);
+            deleteH5file();
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_sizes: " + err);
+        }
+        assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+        assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+        assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+        assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+        assertTrue("sizeof_addr size: "+size[0], size[0] == 4);
+        assertTrue("sizeof_size size: "+size[1], size[1] == 8);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_sym_k_null() throws Throwable {
+        H5.H5Pget_sym_k(fcpl_id, null);
+    }
+    
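+    // sym_k sets the rank of the symbol table B-tree and the size of its
+    // leaf nodes; the asserts below read back the 32 and 8 written here.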
+    @Test
+    public void testH5P_sym_k() {
+        int[] version_info = {255,255,255,255};
+        int[] size = {0,0};
+
+        try {
+            H5.H5Pset_sym_k(fcpl_id, 32, 8);
+            _createH5File(fcpl_id, fapl_id);
+
+            /* Close FCPL */
+            H5.H5Pclose(fcpl_id);
+
+            /* Get the file's creation property list */
+            fcpl_id =  H5.H5Fget_create_plist(H5fid);
+
+            /* Get the file's version information */
+            H5.H5Pget_version(fcpl_id, version_info);
+            H5.H5Pget_sym_k(fcpl_id, size);
+            deleteH5file();
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_sym_k: " + err);
+        }
+        assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+        assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+        assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+        assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+        assertTrue("symbol table tree rank: "+size[0], size[0] == 32);
+        assertTrue("symbol table node size: "+size[1], size[1] == 8);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Pget_istore_k_null() throws Throwable {
+        H5.H5Pget_istore_k(fcpl_id, null);
+    }
+    
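+    // istore_k is half the rank of the B-tree that indexes chunked datasets;
+    // storing a non-default value needs superblock version 1, which is why
+    // version_info[0] is expected to be 1 here rather than 0.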
+    @Test
+    public void testH5P_istore_k() {
+        int[] version_info = {255,255,255,255};
+        int[] size = {0};
+
+        try {
+            H5.H5Pset_istore_k(fcpl_id, 64);
+            _createH5File(fcpl_id, fapl_id);
+
+            /* Close FCPL */
+            H5.H5Pclose(fcpl_id);
+
+            /* Get the file's creation property list */
+            fcpl_id =  H5.H5Fget_create_plist(H5fid);
+
+            /* Get the file's version information */
+            H5.H5Pget_version(fcpl_id, version_info);
+            H5.H5Pget_istore_k(fcpl_id, size);
+            deleteH5file();
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_sym_k: " + err);
+        }
+        assertTrue("super block version: "+version_info[0], version_info[0] == 1);
+        assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+        assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+        assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+        assertTrue("chunked storage b-tree 1/2-rank: "+size[0], size[0] == 64);
+    }
+    
+    @Test
+    public void testH5P_obj_track_times() {
+        boolean default_ret_val = false;
+        boolean ret_val = true;
+        try {
+            default_ret_val = H5.H5Pget_obj_track_times(ocpl_id);
+            H5.H5Pset_obj_track_times(ocpl_id, false);
+            ret_val = H5.H5Pget_obj_track_times(ocpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_obj_track_times: " + err);
+        }
+        assertTrue("H5Pget_obj_track_times default", default_ret_val);    
+        assertFalse("H5Pget_obj_track_times", ret_val);    
+    }
+    
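+    // The char encoding property sets how attribute (and link) names are
+    // encoded; the default is H5T_CSET_ASCII, with H5T_CSET_UTF8 as the
+    // alternative.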
+    @Test
+    public void testH5Pget_char_encoding() {
+        int char_encoding = 0;
+
+        try {
+            char_encoding = H5.H5Pget_char_encoding(acpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_char_encoding: " + err);
+        } 
+        assertTrue("testH5Pget_char_encoding", char_encoding == HDF5Constants.H5T_CSET_ASCII);
+        try {
+            H5.H5Pset_char_encoding(acpl_id, HDF5Constants.H5T_CSET_UTF8);
+            char_encoding = H5.H5Pget_char_encoding(acpl_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_char_encoding: " + err);
+        } 
+        assertTrue("testH5Pget_char_encoding", char_encoding == HDF5Constants.H5T_CSET_UTF8);
+    }
+    
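+    // Fill time controls when fill values are written (IFSET by default, at
+    // allocation, or never); allocation time controls when dataset storage
+    // is allocated. The LATE default below matches the contiguous layout of
+    // a fresh dataset creation list.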
+    @Test
+    public void testH5P_fill_time() {
+        int[] fill_time = {0};
+
+        try {
+            H5.H5Pget_fill_time(ocpl_id, fill_time);
+            assertTrue("fill_time: "+fill_time[0], fill_time[0] == HDF5Constants.H5D_FILL_TIME_IFSET);
+            H5.H5Pset_fill_time(ocpl_id, HDF5Constants.H5D_FILL_TIME_ALLOC);
+            H5.H5Pget_fill_time(ocpl_id, fill_time);
+            assertTrue("fill_time: "+fill_time[0], fill_time[0] == HDF5Constants.H5D_FILL_TIME_ALLOC);
+            H5.H5Pset_fill_time(ocpl_id, HDF5Constants.H5D_FILL_TIME_NEVER);
+            H5.H5Pget_fill_time(ocpl_id, fill_time);
+            assertTrue("fill_time: "+fill_time[0], fill_time[0] == HDF5Constants.H5D_FILL_TIME_NEVER);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fill_time: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_alloc_time() {
+        int[] alloc_time = {0};
+
+        try {
+            H5.H5Pget_alloc_time(ocpl_id, alloc_time);
+            assertTrue("alloc_time: "+alloc_time[0], alloc_time[0] == HDF5Constants.H5D_ALLOC_TIME_LATE);
+            H5.H5Pset_alloc_time(ocpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+            H5.H5Pget_alloc_time(ocpl_id, alloc_time);
+            assertTrue("alloc_time: "+alloc_time[0], alloc_time[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY);
+            H5.H5Pset_alloc_time(ocpl_id, HDF5Constants.H5D_ALLOC_TIME_INCR);
+            H5.H5Pget_alloc_time(ocpl_id, alloc_time);
+            assertTrue("alloc_time: "+alloc_time[0], alloc_time[0] == HDF5Constants.H5D_ALLOC_TIME_INCR);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_alloc_time: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_fill_value() {
+        int[] fill_value = {-1};
+        int[] fill_value_status = {-1};
+
+        try {
+            H5.H5Pfill_value_defined(ocpl_id, fill_value_status);
+            assertTrue("fill_value_status: "+fill_value_status[0], fill_value_status[0] == HDF5Constants.H5D_FILL_VALUE_DEFAULT);
+            H5.H5Pget_fill_value(ocpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+            assertTrue("fill_value: "+fill_value[0], fill_value[0] == 0);
+            fill_value[0] = 255;
+            H5.H5Pset_fill_value(ocpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+            H5.H5Pget_fill_value(ocpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+            assertTrue("fill_value: "+fill_value[0], fill_value[0] == 255);
+            H5.H5Pfill_value_defined(ocpl_id, fill_value_status);
+            assertTrue("fill_value_status: "+fill_value_status[0], fill_value_status[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fill_value: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_layout() {
+        int layout_type = -1;
+
+        try {
+            layout_type = H5.H5Pget_layout(ocpl_id);
+            assertTrue("layout: "+layout_type, layout_type == HDF5Constants.H5D_CONTIGUOUS);
+            H5.H5Pset_layout(ocpl_id, HDF5Constants.H5D_COMPACT);
+            layout_type = H5.H5Pget_layout(ocpl_id);
+            assertTrue("layout: "+layout_type, layout_type == HDF5Constants.H5D_COMPACT);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_layout: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_chunk() {
+        long[] chunk_size = {0,0};
+        long[] chunk_new_size = {2,3};
+        int layout_type = -1;
+
+        try {
+            H5.H5Pset_chunk(ocpl_id, 2, chunk_new_size);
+            H5.H5Pget_chunk(ocpl_id, 2, chunk_size);
+            assertTrue("chunk: "+chunk_size[0], chunk_size[0] == chunk_new_size[0]);
+            assertTrue("chunk: "+chunk_size[1], chunk_size[1] == chunk_new_size[1]);
+            layout_type = H5.H5Pget_layout(ocpl_id);
+            assertTrue("layout: "+layout_type, layout_type == HDF5Constants.H5D_CHUNKED);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_chunk: " + err);
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5PData.java b/sourceTest/java/test/hdf5lib/TestH5PData.java
new file mode 100644
index 0000000..363d2e2
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5PData.java
@@ -0,0 +1,150 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5PData {
+    
+    private static final String H5_FILE = "test.h5";
+    private static final int DIM_X = 12;
+    private static final int DIM_Y = 18;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did = -1;
+    int plist_id = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+    double windchillF[][] =
+    {{36.0, 31.0, 25.0, 19.0, 13.0, 7.0, 1.0, -5.0, -11.0, -16.0, -22.0, -28.0, -34.0, -40.0, -46.0, -52.0, -57.0, -63.0},
+     {34.0, 27.0, 21.0, 15.0, 9.0, 3.0, -4.0, -10.0, -16.0, -22.0, -28.0, -35.0, -41.0, -47.0, -53.0, -59.0, -66.0, -72.0},
+     {32.0, 25.0, 19.0, 13.0, 6.0, 0.0, -7.0, -13.0, -19.0, -26.0, -32.0, -39.0, -45.0, -51.0, -58.0, -64.0, -71.0, -77.0},
+     {30.0, 24.0, 17.0, 11.0, 4.0, -2.0, -9.0, -15.0, -22.0, -29.0, -35.0, -42.0, -48.0, -55.0, -61.0, -68.0, -74.0, -81.0},
+     {29.0, 23.0, 16.0, 9.0, 3.0, -4.0, -11.0, -17.0, -24.0, -31.0, -37.0, -44.0, -51.0, -58.0, -64.0, -71.0, -78.0, -84.0},
+     {28.0, 22.0, 15.0, 8.0, 1.0, -5.0, -12.0, -19.0, -26.0, -33.0, -39.0, -46.0, -53.0, -60.0, -67.0, -73.0, -80.0, -87.0},
+     {28.0, 21.0, 14.0, 7.0, 0.0, -7.0, -14.0, -21.0, -27.0, -34.0, -41.0, -48.0, -55.0, -62.0, -69.0, -76.0, -82.0, -89.0},
+     {27.0, 20.0, 13.0, 6.0, -1.0, -8.0, -15.0, -22.0, -29.0, -36.0, -43.0, -50.0, -57.0, -64.0, -71.0, -78.0, -84.0, -91.0},
+     {26.0, 19.0, 12.0, 5.0, -2.0, -9.0, -16.0, -23.0, -30.0, -37.0, -44.0, -51.0, -58.0, -65.0, -72.0, -79.0, -86.0, -93.0},
+     {26.0, 19.0, 12.0, 4.0, -3.0, -10.0, -17.0, -24.0, -31.0, -38.0, -45.0, -52.0, -60.0, -67.0, -74.0, -81.0, -88.0, -95.0},
+     {25.0, 18.0, 11.0, 4.0, -3.0, -11.0, -18.0, -25.0, -32.0, -39.0, -46.0, -54.0, -61.0, -68.0, -75.0, -82.0, -89.0, -97.0},
+     {25.0, 17.0, 10.0, 3.0, -4.0, -11.0, -19.0, -26.0, -33.0, -40.0, -48.0, -55.0, -62.0, -69.0, -76.0, -84.0, -91.0, -98.0}
+    };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    private final int _createFloatDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_NATIVE_FLOAT, dsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createFloatDataset: ", did > 0);
+
+        return did;
+    }
+
+    @Before
+    public void createH5file()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5did = _createFloatDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+            plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+        assertTrue("TestH5D.createH5file: _createFloatDataset: ",H5did > 0);
+        assertTrue(plist_id > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did > 0) 
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}       
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ 
+        _deleteFile(H5_FILE);
+        
+        if (plist_id > 0)
+            try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+    }
+    
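+    // Round trip: write the Fahrenheit table through a transfer plist that
+    // carries the F-to-C transform, read it back untransformed, and compare
+    // each element against (5/9.0)*(F-32) to three decimal places.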
+    @Test
+    public void testH5Pdata_transform() {
+        String f_to_c = "(5/9.0)*(x-32)";
+        double windchillFread[][] = new double[DIM_X][DIM_Y];
+        double windchillC;
+        NumberFormat formatter = new DecimalFormat("#0.000");
+
+        try {
+            H5.H5Pset_data_transform(plist_id, f_to_c);  
+            H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    plist_id, windchillF);
+            H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, windchillFread);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pdata_transform: " + err);
+        }
+        for(int row = 0; row < DIM_X; row++)
+            for(int col = 0; col < DIM_Y; col++) {
+                windchillC = (5/9.0)*(windchillF[row][col]-32);
+                String Cstr = formatter.format(windchillC);
+                String Fread = formatter.format(windchillFread[row][col]);
+                assertTrue("H5Pdata_transform: <"+row+","+col+">"+Fread+"="+Cstr, Fread.compareTo(Cstr)==0);
+            }
+    }
+    
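+    // H5Pset_buffer_size sets the type-conversion buffer used during dataset
+    // transfers; the first read checks the library default of 1 MB.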
+    @Test
+    public void testH5P_buffer() {
+        long default_size = 0;
+        long size = 0;
+
+        try {
+            default_size = H5.H5Pget_buffer_size(plist_id);  
+            H5.H5Pset_buffer_size(plist_id, DIM_X*DIM_Y);  
+            size = H5.H5Pget_buffer_size(plist_id);  
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_buffer: " + err);
+        }
+        assertTrue("H5P_buffer default: "+default_size, default_size==1024*1024);
+        assertTrue("H5P_buffer default: "+size, size==DIM_X*DIM_Y);
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Pfapl.java b/sourceTest/java/test/hdf5lib/TestH5Pfapl.java
new file mode 100644
index 0000000..4cb3091
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Pfapl.java
@@ -0,0 +1,1314 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+import ncsa.hdf.hdf5lib.structs.H5AC_cache_config_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Pfapl {
+    
+    private static final String H5_FILE = "test.h5";
+    private static final String H5_LOG_FILE = "test.log";
+    private static final String H5_FAMILY_FILE = "test%05d";
+    private static final String H5_MULTI_FILE = "testmulti";
+    private static char MULTI_LETTERS[] = {'X','s','b','r','g','l','o'};
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    private static final int DIMF_X = 12;
+    private static final int DIMF_Y = 18;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did = -1;
+    int H5Fdsid = -1;
+    int H5Fdid = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+    int fapl_id = -1;
+    int plapl_id = -1;
+    int dapl_id = -1;
+    int multi_dxplid = -1;
+    int plist_id = -1;
+    int btplist_id = -1;
+    long[] H5Fdims = { DIMF_X, DIMF_Y };
+    double windchillF[][] =
+    {{36.0, 31.0, 25.0, 19.0, 13.0, 7.0, 1.0, -5.0, -11.0, -16.0, -22.0, -28.0, -34.0, -40.0, -46.0, -52.0, -57.0, -63.0},
+     {34.0, 27.0, 21.0, 15.0, 9.0, 3.0, -4.0, -10.0, -16.0, -22.0, -28.0, -35.0, -41.0, -47.0, -53.0, -59.0, -66.0, -72.0},
+     {32.0, 25.0, 19.0, 13.0, 6.0, 0.0, -7.0, -13.0, -19.0, -26.0, -32.0, -39.0, -45.0, -51.0, -58.0, -64.0, -71.0, -77.0},
+     {30.0, 24.0, 17.0, 11.0, 4.0, -2.0, -9.0, -15.0, -22.0, -29.0, -35.0, -42.0, -48.0, -55.0, -61.0, -68.0, -74.0, -81.0},
+     {29.0, 23.0, 16.0, 9.0, 3.0, -4.0, -11.0, -17.0, -24.0, -31.0, -37.0, -44.0, -51.0, -58.0, -64.0, -71.0, -78.0, -84.0},
+     {28.0, 22.0, 15.0, 8.0, 1.0, -5.0, -12.0, -19.0, -26.0, -33.0, -39.0, -46.0, -53.0, -60.0, -67.0, -73.0, -80.0, -87.0},
+     {28.0, 21.0, 14.0, 7.0, 0.0, -7.0, -14.0, -21.0, -27.0, -34.0, -41.0, -48.0, -55.0, -62.0, -69.0, -76.0, -82.0, -89.0},
+     {27.0, 20.0, 13.0, 6.0, -1.0, -8.0, -15.0, -22.0, -29.0, -36.0, -43.0, -50.0, -57.0, -64.0, -71.0, -78.0, -84.0, -91.0},
+     {26.0, 19.0, 12.0, 5.0, -2.0, -9.0, -16.0, -23.0, -30.0, -37.0, -44.0, -51.0, -58.0, -65.0, -72.0, -79.0, -86.0, -93.0},
+     {26.0, 19.0, 12.0, 4.0, -3.0, -10.0, -17.0, -24.0, -31.0, -38.0, -45.0, -52.0, -60.0, -67.0, -74.0, -81.0, -88.0, -95.0},
+     {25.0, 18.0, 11.0, 4.0, -3.0, -11.0, -18.0, -25.0, -32.0, -39.0, -46.0, -54.0, -61.0, -68.0, -75.0, -82.0, -89.0, -97.0},
+     {25.0, 17.0, 10.0, 3.0, -4.0, -11.0, -19.0, -26.0, -33.0, -40.0, -48.0, -55.0, -62.0, -69.0, -76.0, -84.0, -91.0, -98.0}
+    };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+    
+    private final void _deleteLogFile() {
+        File file = new File(H5_LOG_FILE);
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    private final void _deleteFamilyFile() {
+        java.text.DecimalFormat myFormat = new java.text.DecimalFormat("00000");
+        for (int indx = 0; ; indx++) {
+            File file = new File("test" + myFormat.format(indx) + ".h5");
+            if (file.exists()) {
+                try {file.delete();} catch (SecurityException e) {}
+            }
+            else
+                return;
+        }
+    }
+
+    private final void _deleteMultiFile() {
+        for (int indx = 1; indx < 7; indx++) {
+            File file = new File(H5_MULTI_FILE + "-" + MULTI_LETTERS[indx] + ".h5");
+            if (file.exists()) {
+                try {file.delete();} catch (SecurityException e) {}
+            }
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        } catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createDataset: ", did > 0);
+
+        return did;
+    }
+
+    private final void _createFloatDataset() {
+        try {
+            H5Fdsid = H5.H5Screate_simple(2, H5Fdims, null);
+            H5Fdid = H5.H5Dcreate(H5fid, "dsfloat", HDF5Constants.H5T_NATIVE_FLOAT, H5Fdsid,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        } catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5D._createFloatDataset: ", H5Fdid > 0);
+
+        try {
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+        }
+    }
+
+    private final void _createH5multiFileDS() {
+        try {
+            H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: _createDataset: ", H5did > 0);
+
+        try {
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+        }
+    }
+
+    private final void _createH5File(int fapl) {
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, fapl);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ", H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+        assertTrue("TestH5D.createH5file: _createDataset: ", H5did > 0);
+
+        try {
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+        }
+    }
+
+    private final void _createH5familyFile(int fapl) {
+        try {
+            H5fid = H5.H5Fcreate(H5_FAMILY_FILE+".h5", HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, fapl);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ", H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+        assertTrue("TestH5D.createH5file: _createDataset: ", H5did > 0);
+
+        try {
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+        }
+    }
+
+    private final void _createH5multiFile(int fapl) {
+        try {
+            H5fid = H5.H5Fcreate(H5_MULTI_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, fapl);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ", H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+
+        try {
+            H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+        }
+    }
+
+    public void deleteH5file() {
+        _deleteFile(H5_FILE);
+    }
+
+    public void deleteH5familyfile() {
+        _deleteFamilyFile();
+    }
+
+    public void deleteH5multifile() {
+        _deleteMultiFile();
+    }
+
+    @Before
+    public void createFileAccess()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createFileAccess: " + err);
+        }
+        assertTrue(fapl_id > 0);
+        try {
+            plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createFileAccess: " + err);
+        }
+        assertTrue(plapl_id > 0);
+        try {
+            multi_dxplid = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+            plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+            btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+            dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createFileAccess: " + err);
+        }
+        assertTrue(multi_dxplid > 0);
+        assertTrue(plist_id > 0);
+        assertTrue(btplist_id > 0);
+        assertTrue(dapl_id > 0);
+    }
+
+    @After
+    public void deleteFileAccess() throws HDF5LibraryException {
+        if (fapl_id > 0)
+            try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+        if (plapl_id > 0)
+            try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+        if (dapl_id > 0)
+            try {H5.H5Pclose(dapl_id);} catch (Exception ex) {}
+        if (plist_id > 0)
+            try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+        if (btplist_id > 0)
+            try {H5.H5Pclose(btplist_id);} catch (Exception ex) {}
+        if (multi_dxplid > 0)
+            try {H5.H5Pclose(multi_dxplid);} catch (Exception ex) {}
+        
+        if (H5Fdsid > 0) 
+            try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {}
+        if (H5Fdid > 0) 
+            try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {}
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did > 0) 
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+    }
+    
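+    // Library version bounds bracket the object format versions the library
+    // may use when writing; a fresh fapl allows the full range from
+    // H5F_LIBVER_EARLIEST to H5F_LIBVER_LATEST.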
+    @Test
+    public void testH5Pget_libver_bounds() {
+        int ret_val = -1;
+        int[] libver = new int[2];
+        
+        try {
+            ret_val = H5.H5Pget_libver_bounds(fapl_id, libver);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_libver_bounds: " + err);
+        }
+        assertTrue("testH5Pget_libver_bounds", ret_val >= 0);
+        // Check the earliest version of the library
+        assertEquals(HDF5Constants.H5F_LIBVER_EARLIEST, libver[0]);
+        // Check the latest version of the library
+        assertEquals(HDF5Constants.H5F_LIBVER_LATEST, libver[1]);
+    }
+       
+    @Test
+    public void testH5Pset_libver_bounds() {
+        
+        int ret_val = -1;
+        int low = HDF5Constants.H5F_LIBVER_EARLIEST;
+        int high = HDF5Constants.H5F_LIBVER_LATEST;
+        int[] libver = new int[2];
+
+        try {
+            ret_val = H5.H5Pset_libver_bounds(fapl_id, low, high);
+            H5.H5Pget_libver_bounds(fapl_id, libver);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_libver_bounds: " + err);
+        }
+        assertTrue("testH5Pset_libver_bounds", ret_val >= 0);
+        // Check the earliest version of the library
+        assertEquals(HDF5Constants.H5F_LIBVER_EARLIEST, libver[0]);
+        // Check the latest version of the library
+        assertEquals(HDF5Constants.H5F_LIBVER_LATEST, libver[1]);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Pset_elink_fapl_NegativeID() throws Throwable {
+        H5.H5Pset_elink_fapl(-1, fapl_id );
+    }
+
+    @Test
+    public void testH5Pset_elink_fapl() {
+        int ret_val = -1;
+        try {
+            ret_val = H5.H5Pset_elink_fapl(plapl_id, fapl_id );
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_elink_fapl: " + err);
+        }
+        assertTrue("H5Pset_elink_fapl", ret_val >= 0);
+    }
+    
+    @Test
+    public void testH5Pget_elink_fapl() {
+        int ret_val_id = -1;
+        try {
+            ret_val_id = H5.H5Pget_elink_fapl(plapl_id);
+            assertTrue("H5Pget_elink_fapl", ret_val_id >= 0);
+            assertEquals(HDF5Constants.H5P_DEFAULT, ret_val_id );
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_elink_fapl: " + err);
+        }
+        finally {
+            if (ret_val_id > 0)
+                try {H5.H5Pclose(ret_val_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5P_elink_fapl() {
+        int ret_val_id = -1;
+        try {
+            H5.H5Pset_elink_fapl(plapl_id, fapl_id );
+            ret_val_id = H5.H5Pget_elink_fapl(plapl_id);
+            assertTrue("H5P_elink_fapl", ret_val_id >= 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_elink_fapl: " + err);
+        }
+        finally {
+            if (ret_val_id > 0)
+                try {H5.H5Pclose(ret_val_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5P_elink_file_cache_size() {
+        int elink_fapl_id = -1;
+        int efc_size = 0;
+        try {
+            H5.H5Pset_elink_fapl(plapl_id, fapl_id );
+            elink_fapl_id = H5.H5Pget_elink_fapl(plapl_id);
+            assertTrue("H5P_elink_file_cache_size", elink_fapl_id >= 0);
+            try {
+                efc_size = H5.H5Pget_elink_file_cache_size(elink_fapl_id);
+                assertTrue("H5P_elink_file_cache_size default", efc_size == 0);
+            }
+            catch (UnsupportedOperationException err) {
+                System.out.println(err.getMessage());
+            }
+            try {
+                efc_size = 8;
+                H5.H5Pset_elink_file_cache_size(elink_fapl_id, efc_size);
+                efc_size = H5.H5Pget_elink_file_cache_size(elink_fapl_id);
+                assertTrue("H5P_elink_file_cache_size 8", efc_size == 8);
+            }
+            catch (UnsupportedOperationException err) {
+                System.out.println(err.getMessage());
+            }
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_elink_file_cache_size: " + err);
+        }
+        finally {
+            if (elink_fapl_id > 0)
+                try {H5.H5Pclose(elink_fapl_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5P_btree_ratios() {
+        double[] left = {0.1};
+        double[] middle = {0.5};
+        double[] right = {0.7};
+        try {
+            H5.H5Pset_btree_ratios(plist_id, left[0], middle[0], right[0]);
+            H5.H5Pget_btree_ratios(plist_id, left, middle, right);
+            assertTrue("H5P_btree_ratios", left[0] == 0.1);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_btree_ratios: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_edc_check() {
+        int ret_val_id = -1;
+        try {
+            ret_val_id = H5.H5Pget_edc_check(plist_id);
+            assertTrue("H5P_edc_check", ret_val_id == HDF5Constants.H5Z_ENABLE_EDC);
+            H5.H5Pset_edc_check(plist_id, HDF5Constants.H5Z_DISABLE_EDC);
+            ret_val_id = H5.H5Pget_edc_check(plist_id);
+            assertTrue("H5P_edc_check", ret_val_id == HDF5Constants.H5Z_DISABLE_EDC);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_edc_check: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_fclose_degree() {
+        int ret_val_id = -1;
+        try {
+            ret_val_id = H5.H5Pget_fclose_degree(fapl_id);
+            assertTrue("H5Pget_fclose_degree default", ret_val_id == HDF5Constants.H5F_CLOSE_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_fclose_degree: default " + err);
+        }
+        try {
+            H5.H5Pset_fclose_degree(fapl_id, HDF5Constants.H5F_CLOSE_STRONG);
+            ret_val_id = H5.H5Pget_fclose_degree(fapl_id);
+            assertTrue("H5Pget_fclose_degree", ret_val_id == HDF5Constants.H5F_CLOSE_STRONG);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_fclose_degree: H5F_CLOSE_STRONG " + err);
+        }
+        try {
+            H5.H5Pset_fclose_degree(fapl_id, HDF5Constants.H5F_CLOSE_SEMI);
+            ret_val_id = H5.H5Pget_fclose_degree(fapl_id);
+            assertTrue("H5Pget_fclose_degree", ret_val_id == HDF5Constants.H5F_CLOSE_SEMI);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_fclose_degree: H5F_CLOSE_SEMI " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_alignment() {
+        long[] align = {0,0};
+        try {
+            H5.H5Pget_alignment(fapl_id, align);
+            assertTrue("H5P_alignment threshold default", align[0] == 1);
+            assertTrue("H5P_alignment alignment default", align[1] == 1);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_alignment: default " + err);
+        }
+        try {
+            align[0] = 1024;
+            align[1] = 2048;
+            H5.H5Pset_alignment(fapl_id, align[0], align[1]);
+            H5.H5Pget_alignment(fapl_id, align);
+            assertTrue("H5P_alignment threshold", align[0] == 1024);
+            assertTrue("H5P_alignment alignment", align[1] == 2048);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_alignment: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_meta_block_size() {
+        long meta_size = 0;
+        try {
+            meta_size = H5.H5Pget_meta_block_size(fapl_id);
+            assertTrue("H5P_meta_block_size default", meta_size == 2048);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_meta_block_size: default " + err);
+        }
+        try {
+            meta_size = 4096;
+            H5.H5Pset_meta_block_size(fapl_id, meta_size);
+            meta_size = H5.H5Pget_meta_block_size(fapl_id);
+            assertTrue("H5P_meta_block_size 4096", meta_size == 4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_meta_block_size: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_small_data_block_size() {
+        long[] align = {0};
+        try {
+            H5.H5Pget_small_data_block_size(fapl_id, align);
+            assertTrue("H5P_small_data_block_size default", align[0] == 2048);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_small_data_block_size: default " + err);
+        }
+        try {
+            align[0] = 4096;
+            H5.H5Pset_small_data_block_size(fapl_id, align[0]);
+            H5.H5Pget_small_data_block_size(fapl_id, align);
+            assertTrue("H5P_small_data_block_size 4096", align[0] == 4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_small_data_block_size: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_hyper_vector_size() {
+        long[] align = {0};
+        try {
+            H5.H5Pget_hyper_vector_size(plist_id, align);
+            assertTrue("H5P_hyper_vector_size default", align[0] == 1024);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_hyper_vector_size: default " + err);
+        }
+        try {
+            align[0] = 4096;
+            H5.H5Pset_hyper_vector_size(plist_id, align[0]);
+            H5.H5Pget_hyper_vector_size(plist_id, align);
+            assertTrue("H5P_hyper_vector_size 4096", align[0] == 4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_hyper_vector_size: " + err);
+        }
+    }
+    
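+    // The raw data chunk cache defaults to 521 hash slots, 1 MB of space,
+    // and a preemption policy (w0) of 0.75, as the first block verifies.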
+    @Test
+    public void testH5P_cache() {
+        long[] rdcc_nelmts = {0};
+        long[] rdcc_nbytes = {0};
+        double[] rdcc_w0 = {0};
+        try {
+            H5.H5Pget_cache(fapl_id, null, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+            assertTrue("H5P_cache default", rdcc_nelmts[0] == 521);
+            assertTrue("H5P_cache default", rdcc_nbytes[0] == (1024*1024));
+            assertTrue("H5P_cache default", rdcc_w0[0] == 0.75);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_cache: default " + err);
+        }
+        try {
+            rdcc_nelmts[0] = 4096;
+            H5.H5Pset_cache(fapl_id, 0, rdcc_nelmts[0], rdcc_nbytes[0], rdcc_w0[0]);
+            H5.H5Pget_cache(fapl_id, null, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+            assertTrue("H5P_cache 4096", rdcc_nelmts[0] == 4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_cache: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_chunk_cache() {
+        long[] rdcc_nslots = {0};
+        long[] rdcc_nbytes = {0};
+        double[] rdcc_w0 = {0};
+        try {
+            H5.H5Pget_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0);
+            assertTrue("H5P_chunk_cache default", rdcc_nslots[0] == 521);
+            assertTrue("H5P_chunk_cache default", rdcc_nbytes[0] == (1024*1024));
+            assertTrue("H5P_chunk_cache default", rdcc_w0[0] == 0.75);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_chunk_cache: default " + err);
+        }
+        try {
+            rdcc_nslots[0] = 4096;
+            H5.H5Pset_chunk_cache(dapl_id, rdcc_nslots[0], rdcc_nbytes[0], rdcc_w0[0]);
+            H5.H5Pget_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0);
+            assertTrue("H5P_chunk_cache 4096", rdcc_nslots[0] == 4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_chunk_cache: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_sieve_buf_size() {
+        long buf_size = 0;
+        try {
+            buf_size = H5.H5Pget_sieve_buf_size(fapl_id);
+            assertTrue("H5P_sieve_buf_size default", buf_size == (64*1024));
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_sieve_buf_size: default " + err);
+        }
+        try {
+            buf_size = 4096;
+            H5.H5Pset_sieve_buf_size(fapl_id, buf_size);
+            buf_size = H5.H5Pget_sieve_buf_size(fapl_id);
+            assertTrue("H5P_sieve_buf_size 4096", buf_size == 4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_sieve_buf_size: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_gc_references() {
+        boolean ret_val_id = false;
+        try {
+            H5.H5Pset_gc_references(fapl_id, true);
+            ret_val_id = H5.H5Pget_gcreferences(fapl_id);
+            assertTrue("H5P_gc_references", ret_val_id);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5P_gc_references: " + err);
+        }
+    }
+    
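+    // The metadata cache is configured through an H5AC_cache_config_t whose
+    // version field must match H5AC_CURR_CACHE_CONFIG_VERSION for the set
+    // call to be accepted.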
+    @Test
+    public void testH5Pget_mdc_config() {
+        H5AC_cache_config_t cache_config = null;
+        try {
+            cache_config = H5.H5Pget_mdc_config(fapl_id);
+            assertTrue("H5Pget_mdc_config", cache_config.version==HDF5Constants.H5AC_CURR_CACHE_CONFIG_VERSION);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_mdc_config: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5Pset_mdc_config() {
+        H5AC_cache_config_t cache_config = null;
+        try {
+            cache_config = H5.H5Pget_mdc_config(fapl_id);
+            assertTrue("H5Pset_mdc_config", cache_config.version==HDF5Constants.H5AC_CURR_CACHE_CONFIG_VERSION);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_mdc_config: " + err);
+        }
+        try {
+            cache_config.decr_mode = HDF5Constants.H5C_decr_off;
+            H5.H5Pset_mdc_config(fapl_id, cache_config);
+            cache_config = H5.H5Pget_mdc_config(fapl_id);
+            assertTrue("H5Pset_mdc_config", cache_config.version==HDF5Constants.H5AC_CURR_CACHE_CONFIG_VERSION);
+            assertTrue("H5Pset_mdc_config", cache_config.decr_mode==HDF5Constants.H5C_decr_off);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_mdc_config: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5P_fapl_core() {
+        if (HDF5Constants.H5FD_CORE < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_core(fapl_id, 4096, false);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: core = "+ driver_type, HDF5Constants.H5FD_CORE==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_core: " + err);
+        }
+        try {
+            long[] increment = {-1};
+            boolean[] backingstore = {true};
+            H5.H5Pget_fapl_core(fapl_id, increment, backingstore);
+            assertTrue("H5Pget_fapl_core: increment="+increment[0], increment[0]==4096);
+            assertTrue("H5Pget_fapl_core: backingstore="+backingstore[0], !backingstore[0]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_core: " + err);
+        }
+    }
+    
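+    // Illustrative sketch (not invoked by the suite): the core driver keeps
+    // the whole file image in memory; with backing_store=true the image is
+    // written to disk on close. "in_core.h5" is a hypothetical file name.
+    private void exampleCoreDriver() throws Exception {
+        int fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+        try {
+            H5.H5Pset_fapl_core(fapl, 64*1024, true);
+            int fid = H5.H5Fcreate("in_core.h5", HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, fapl);
+            H5.H5Fclose(fid);
+        }
+        finally {
+            H5.H5Pclose(fapl);
+        }
+    }
+    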
+    @Test
+    public void testH5P_fapl_family() {
+        if (HDF5Constants.H5FD_FAMILY < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_family(fapl_id, 1024, HDF5Constants.H5P_DEFAULT);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: family = "+ driver_type, HDF5Constants.H5FD_FAMILY==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_family: " + err);
+        }
+        try {
+            long[] member_size = {0};
+            int[] member_fapl = {-1};
+            H5.H5Pget_fapl_family(fapl_id, member_size, member_fapl);
+            assertTrue("H5Pget_fapl_family: member_size="+member_size[0], member_size[0]==1024);
+            assertTrue("H5Pget_fapl_family: member_fapl ", H5.H5P_equal(member_fapl[0], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_family: " + err);
+        }
+        _createH5familyFile(fapl_id);
+        deleteH5familyfile();
+    }
+    
+    @Test
+    public void testH5P_family_offset() {
+        if (HDF5Constants.H5FD_FAMILY < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_family(fapl_id, 1024, HDF5Constants.H5P_DEFAULT);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: family = "+ driver_type, HDF5Constants.H5FD_FAMILY==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_family: " + err);
+        }
+        _createH5familyFile(fapl_id);
+        long family_offset = 512;
+        try {
+            H5.H5Pset_family_offset(fapl_id, family_offset);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_family: " + err);
+        }
+        try {
+            long offset = H5.H5Pget_family_offset(fapl_id);
+            assertTrue("H5Pget_fapl_family: offset="+offset, offset==family_offset);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_family: " + err);
+        }
+        deleteH5familyfile();
+    }
+    
+    @Test
+    public void testH5Pset_fapl_sec2() {
+        if (HDF5Constants.H5FD_SEC2 < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_sec2(fapl_id);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: sec2 = "+ driver_type, HDF5Constants.H5FD_SEC2==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_sec2: " + err);
+        }
+        _createH5File(fapl_id);
+        deleteH5file();
+    }
+    
+    @Test
+    public void testH5Pset_fapl_stdio() {
+        if (HDF5Constants.H5FD_STDIO < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_stdio(fapl_id);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: stdio = "+ driver_type, HDF5Constants.H5FD_STDIO==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_stdio: " + err);
+        }
+        _createH5File(fapl_id);
+        deleteH5file();
+    }
+    
+    @Test
+    public void testH5Pset_fapl_log() {
+        if (HDF5Constants.H5FD_LOG < 0)
+            return;
+        try {
+            long log_flags = HDF5Constants.H5FD_LOG_LOC_IO;
+            H5.H5Pset_fapl_log(fapl_id, H5_LOG_FILE, log_flags, 1024);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: log = "+ driver_type, HDF5Constants.H5FD_LOG==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_log: " + err);
+        }
+        _createH5File(fapl_id);
+        deleteH5file();
+        _deleteLogFile();
+    }
+    
+    @Test
+    public void testH5P_fapl_multi_nulls() {
+        if (HDF5Constants.H5FD_MULTI < 0)
+            return;
+        
+        int[] member_map = null;
+        int[] member_fapl = null;
+        String[] member_name = null;
+        long[] member_addr = null;
+        
+        try {
+            H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_muti: " + err);
+        }
+        try {
+            boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+            assertTrue("H5Pget_fapl_muti: relax ", relax);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_muti: " + err);
+        }
+        _createH5multiFile(fapl_id);
+        deleteH5multifile();
+    }
+    
+    @Test
+    public void testH5P_fapl_multi_defaults() {
+        if (HDF5Constants.H5FD_MULTI < 0)
+            return;
+        long HADDRMAX = HDF5Constants.H5FD_DEFAULT_HADDR_SIZE;
+        int[] member_map = null;
+        int[] member_fapl = null;
+        String[] member_name = null;
+        long[] member_addr = null;
+        
+        try {
+            H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_muti: " + err);
+        }
+        try {
+            member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+            member_fapl = new int[HDF5Constants.H5FD_MEM_NTYPES];
+            member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+            member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+            boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+            assertTrue("H5Pget_fapl_muti: relax ", relax);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DEFAULT], member_map[HDF5Constants.H5FD_MEM_DEFAULT] == HDF5Constants.H5FD_MEM_DEFAULT);
+            assertTrue("H5Pget_fapl_muti: member_fapl ", H5.H5P_equal(member_fapl[HDF5Constants.H5FD_MEM_DEFAULT], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DEFAULT], member_name[HDF5Constants.H5FD_MEM_DEFAULT].compareTo("%s-X.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-s.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_BTREE], member_name[HDF5Constants.H5FD_MEM_BTREE].compareTo("%s-b.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-r.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DEFAULT], member_addr[HDF5Constants.H5FD_MEM_DEFAULT] == 0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_SUPER], member_addr[HDF5Constants.H5FD_MEM_SUPER] == 0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_BTREE]+"<>"+HADDRMAX, member_addr[HDF5Constants.H5FD_MEM_BTREE] == HADDRMAX);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DRAW], member_addr[HDF5Constants.H5FD_MEM_DRAW] == (HADDRMAX-1));
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_GHEAP], member_addr[HDF5Constants.H5FD_MEM_GHEAP] == (HADDRMAX-1));
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_LHEAP], member_addr[HDF5Constants.H5FD_MEM_LHEAP] == (HADDRMAX-1));
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_OHDR], member_addr[HDF5Constants.H5FD_MEM_OHDR] == (HADDRMAX-2));
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_muti: " + err);
+        }
+        _createH5multiFile(fapl_id);
+        _createH5multiFileDS();
+        deleteH5multifile();
+    }
+    
+    @Test
+    public void testH5P_fapl_multi() {
+        if (HDF5Constants.H5FD_MULTI < 0)
+            return;
+        long HADDRMAX = HDF5Constants.H5FD_DEFAULT_HADDR_SIZE;
+        
+        int[] member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+        int[] member_fapl = new int[HDF5Constants.H5FD_MEM_NTYPES];
+        String[] member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+        long[] member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+
+        for(int mt=HDF5Constants.H5FD_MEM_DEFAULT; mt<HDF5Constants.H5FD_MEM_NTYPES; mt++) {
+            member_fapl[mt] = HDF5Constants.H5P_DEFAULT;
+            member_map[mt] = HDF5Constants.H5FD_MEM_SUPER;
+        }
+        member_map[HDF5Constants.H5FD_MEM_DRAW] = HDF5Constants.H5FD_MEM_DRAW;
+        member_map[HDF5Constants.H5FD_MEM_BTREE] = HDF5Constants.H5FD_MEM_BTREE;
+        member_map[HDF5Constants.H5FD_MEM_GHEAP] = HDF5Constants.H5FD_MEM_GHEAP;
+
+        member_name[HDF5Constants.H5FD_MEM_SUPER] = "%s-super.h5";
+        member_addr[HDF5Constants.H5FD_MEM_SUPER] = 0;
+
+        member_name[HDF5Constants.H5FD_MEM_BTREE] = "%s-btree.h5";
+        member_addr[HDF5Constants.H5FD_MEM_BTREE] = HADDRMAX/4;
+
+        member_name[HDF5Constants.H5FD_MEM_DRAW] = "%s-draw.h5";
+        member_addr[HDF5Constants.H5FD_MEM_DRAW] = HADDRMAX/2;
+
+        member_name[HDF5Constants.H5FD_MEM_GHEAP] = "%s-gheap.h5";
+        member_addr[HDF5Constants.H5FD_MEM_GHEAP] = (HADDRMAX/4)*3;
+        
+        try {
+            H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_muti: " + err);
+        }
+        try {
+            boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+            assertTrue("H5Pget_fapl_muti: relax ", relax);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DEFAULT], member_map[HDF5Constants.H5FD_MEM_DEFAULT] == HDF5Constants.H5FD_MEM_SUPER);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DRAW], member_map[HDF5Constants.H5FD_MEM_DRAW] == HDF5Constants.H5FD_MEM_DRAW);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_BTREE], member_map[HDF5Constants.H5FD_MEM_BTREE] == HDF5Constants.H5FD_MEM_BTREE);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_GHEAP], member_map[HDF5Constants.H5FD_MEM_GHEAP] == HDF5Constants.H5FD_MEM_GHEAP);
+
+            assertTrue("H5Pget_fapl_muti: member_fapl ", H5.H5P_equal(member_fapl[HDF5Constants.H5FD_MEM_DEFAULT], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DEFAULT], member_addr[HDF5Constants.H5FD_MEM_DEFAULT] == 0);
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-super.h5")==0);
+
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_BTREE], member_name[HDF5Constants.H5FD_MEM_BTREE].compareTo("%s-btree.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_BTREE], member_addr[HDF5Constants.H5FD_MEM_BTREE] == HADDRMAX/4);
+
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-draw.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DRAW], member_addr[HDF5Constants.H5FD_MEM_DRAW] == HADDRMAX/2);
+
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_GHEAP], member_name[HDF5Constants.H5FD_MEM_GHEAP].compareTo("%s-gheap.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_GHEAP], member_addr[HDF5Constants.H5FD_MEM_GHEAP] == (HADDRMAX/4)*3);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_muti: " + err);
+        }
+        _createH5multiFile(fapl_id);
+        try {
+            long file_size = H5.H5Fget_filesize(H5fid);
+            assertTrue("H5Pget_fapl_muti: file_size ", file_size >= HADDRMAX/4 || file_size <= HADDRMAX/2);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_muti:H5Fget_filesize " + err);
+        }
+        _createH5multiFileDS();
+        deleteH5multifile();
+        File file = new File(H5_MULTI_FILE+"-super.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-btree.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-draw.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-gheap.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+    }
+    
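+    // Illustrative sketch (not invoked by the suite): the split layout is a
+    // two-member special case of the multi driver, keeping metadata and raw
+    // data in separate files; compare testH5P_fapl_split below.
+    private void exampleSplitDriver() throws Exception {
+        int fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+        try {
+            H5.H5Pset_fapl_split(fapl, "-m.h5", HDF5Constants.H5P_DEFAULT,
+                    "-r.h5", HDF5Constants.H5P_DEFAULT);
+        }
+        finally {
+            H5.H5Pclose(fapl);
+        }
+    }
+    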
+    @Test
+    public void testH5P_fapl_split() {
+        if (HDF5Constants.H5FD_MULTI < 0)
+            return;
+        
+        try {
+            H5.H5Pset_fapl_split(fapl_id, "-meta.h5", HDF5Constants.H5P_DEFAULT, "-raw.h5", HDF5Constants.H5P_DEFAULT);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: split = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_split: " + err);
+        }
+        try {
+            int[] member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+            int[] member_fapl = new int[HDF5Constants.H5FD_MEM_NTYPES];
+            String[] member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+            long[] member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+            boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+            assertTrue("H5Pget_fapl_multi: relax ", relax);
+            assertTrue("H5Pget_fapl_multi: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-meta.h5")==0);
+            assertTrue("H5Pget_fapl_multi: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-raw.h5")==0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_split: " + err);
+        }
+        _createH5multiFile(fapl_id);
+        deleteH5multifile();
+        File file = new File(H5_MULTI_FILE+"-meta.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-raw.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+    }
+    
+    @Test
+    public void testH5P_fapl_direct() {
+        if (HDF5Constants.H5FD_DIRECT < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_direct(fapl_id, 1024, 4096, 8*4096);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: direct = "+ driver_type, HDF5Constants.H5FD_DIRECT==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_direct: " + err);
+        }
+        try {
+            long[] params = {-1, -1, -1};
+            H5.H5Pget_fapl_direct(fapl_id, params);
+            assertTrue("H5Pget_fapl_direct: alignment="+params[0], params[0]==1024);
+            assertTrue("H5Pget_fapl_direct: block_size="+params[1], params[1]==4096);
+            assertTrue("H5Pget_fapl_direct: cbuf_size="+params[2], params[2]==8*4096);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_direct: " + err);
+        }
+        _createH5File(fapl_id);
+        deleteH5file();
+    }
+    
+    @Test
+    public void testH5Pset_fapl_windows() {
+        if (HDF5Constants.H5FD_WINDOWS < 0)
+            return;
+        try {
+            H5.H5Pset_fapl_windows(fapl_id);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: windows = "+ driver_type, HDF5Constants.H5FD_WINDOWS==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_windows: " + err);
+        }
+        _createH5File(fapl_id);
+        deleteH5file();
+    }
+    
+    @Test
+    public void testH5Pmulti_transform() {
+        if (HDF5Constants.H5FD_MULTI < 0)
+            return;
+        String f_to_c = "(5/9.0)*(x-32)";
+        double windchillFread[][] = new double[DIMF_X][DIMF_Y];
+        double windchillC;
+        NumberFormat formatter = new DecimalFormat("#0.000");
+        long HADDRMAX = HDF5Constants.H5FD_DEFAULT_HADDR_SIZE;
+
+        int[] member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+        int[] member_fapl = new int[HDF5Constants.H5FD_MEM_NTYPES];
+        String[] member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+        long[] member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+        int[] member_dxpl = new int[HDF5Constants.H5FD_MEM_NTYPES];
+
+        try {
+            H5.H5Pset_data_transform(plist_id, f_to_c);
+            H5.H5Pset_btree_ratios(btplist_id, 0.1, 0.5, 0.7);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pdata_transform: " + err);
+        }
+
+        for(int mt=HDF5Constants.H5FD_MEM_DEFAULT; mt<HDF5Constants.H5FD_MEM_NTYPES; mt++) {
+            member_fapl[mt] = HDF5Constants.H5P_DEFAULT;
+            member_dxpl[mt] = HDF5Constants.H5P_DEFAULT;
+            member_map[mt] = HDF5Constants.H5FD_MEM_SUPER;
+        }
+        member_map[HDF5Constants.H5FD_MEM_DRAW] = HDF5Constants.H5FD_MEM_DRAW;
+        member_map[HDF5Constants.H5FD_MEM_BTREE] = HDF5Constants.H5FD_MEM_BTREE;
+        member_map[HDF5Constants.H5FD_MEM_GHEAP] = HDF5Constants.H5FD_MEM_GHEAP;
+
+        member_name[HDF5Constants.H5FD_MEM_SUPER] = "%s-super.h5";
+        member_addr[HDF5Constants.H5FD_MEM_SUPER] = 0;
+
+        member_name[HDF5Constants.H5FD_MEM_BTREE] = "%s-btree.h5";
+        member_addr[HDF5Constants.H5FD_MEM_BTREE] = HADDRMAX/4;
+
+        member_name[HDF5Constants.H5FD_MEM_DRAW] = "%s-draw.h5";
+        member_addr[HDF5Constants.H5FD_MEM_DRAW] = HADDRMAX/2;
+
+        member_name[HDF5Constants.H5FD_MEM_GHEAP] = "%s-gheap.h5";
+        member_addr[HDF5Constants.H5FD_MEM_GHEAP] = (HADDRMAX/4)*3;
+
+        try {
+            H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+            int driver_type = H5.H5Pget_driver(fapl_id);
+            assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_fapl_muti: " + err);
+        }
+        try {
+            member_dxpl[HDF5Constants.H5FD_MEM_DRAW] = plist_id;
+            member_dxpl[HDF5Constants.H5FD_MEM_BTREE] = btplist_id;
+//            H5.H5Pset_dxpl_multi(multi_dxplid, member_dxpl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pset_dxpl_muti: " + err);
+        }
+        try {
+            boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+            assertTrue("H5Pget_fapl_muti: relax ", relax);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DEFAULT], member_map[HDF5Constants.H5FD_MEM_DEFAULT] == HDF5Constants.H5FD_MEM_SUPER);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DRAW], member_map[HDF5Constants.H5FD_MEM_DRAW] == HDF5Constants.H5FD_MEM_DRAW);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_BTREE], member_map[HDF5Constants.H5FD_MEM_BTREE] == HDF5Constants.H5FD_MEM_BTREE);
+            assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_GHEAP], member_map[HDF5Constants.H5FD_MEM_GHEAP] == HDF5Constants.H5FD_MEM_GHEAP);
+
+            assertTrue("H5Pget_fapl_muti: member_fapl ", H5.H5P_equal(member_fapl[HDF5Constants.H5FD_MEM_DEFAULT], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DEFAULT], member_addr[HDF5Constants.H5FD_MEM_DEFAULT] == 0);
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-super.h5")==0);
+
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_BTREE], member_name[HDF5Constants.H5FD_MEM_BTREE].compareTo("%s-btree.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_BTREE], member_addr[HDF5Constants.H5FD_MEM_BTREE] == HADDRMAX/4);
+
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-draw.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DRAW], member_addr[HDF5Constants.H5FD_MEM_DRAW] == HADDRMAX/2);
+
+            assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_GHEAP], member_name[HDF5Constants.H5FD_MEM_GHEAP].compareTo("%s-gheap.h5")==0);
+            assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_GHEAP], member_addr[HDF5Constants.H5FD_MEM_GHEAP] == (HADDRMAX/4)*3);
+
+            assertTrue("H5Pget_dxpl_muti: member_dxpl=", H5.H5P_equal(member_dxpl[HDF5Constants.H5FD_MEM_BTREE], btplist_id));
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pget_fapl_muti: " + err);
+        }
+        try {
+            _createH5multiFile(fapl_id);
+            long file_size = H5.H5Fget_filesize(H5fid);
+            assertTrue("H5Pget_fapl_muti: file_size ", file_size >= HADDRMAX/4 || file_size <= HADDRMAX/2);
+            _createH5multiFileDS();
+            _createFloatDataset();
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pdata_transform: " + err);
+        }
+        try {
+            H5.H5Dwrite(H5Fdid, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    member_dxpl[HDF5Constants.H5FD_MEM_DRAW], windchillF);
+            H5.H5Dread(H5Fdid, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, windchillFread);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5Pdata_transform: " + err);
+        }
+        for(int row = 0; row < DIMF_X; row++) {
+            for(int col = 0; col < DIMF_Y; col++) {
+                windchillC = (5/9.0)*(windchillF[row][col]-32);
+                String Cstr = formatter.format(windchillC);
+                String Fread = formatter.format(windchillFread[row][col]);
+                assertTrue("H5Pdata_transform: <"+row+","+col+">"+Fread+"="+Cstr, Fread.compareTo(Cstr)==0);
+            }
+        }
+        deleteH5multifile();
+        File file = new File(H5_MULTI_FILE+"-super.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-btree.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-draw.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+        file = new File(H5_MULTI_FILE+"-gheap.h5");
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                ;// e.printStackTrace();
+            }
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5R.java b/sourceTest/java/test/hdf5lib/TestH5R.java
new file mode 100644
index 0000000..f811dc9
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5R.java
@@ -0,0 +1,333 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5R {
+    private static final String H5_FILE = "testH5R.h5";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 6;
+    int H5fid = -1;
+    int H5dsid = -1;
+    int H5did = -1;
+    int H5gid = -1;
+    int H5did2 = -1;
+    long[] H5dims = { DIM_X, DIM_Y };
+
+    private final void _deleteFile(String filename) {
+        File file = new File(filename);
+        if (file.exists()) {
+            try {
+                file.delete();
+            }
+            catch (SecurityException e) {
+                // ignore failed deletes
+            }
+        }
+    }
+
+    private final int _createDataset(int fid, int dsid, String name, int dapl) {
+        int did = -1;
+        try {
+            did = H5.H5Dcreate(fid, name,
+                        HDF5Constants.H5T_STD_I32BE, dsid,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Dcreate: " + err);
+        }
+        assertTrue("TestH5R._createDataset: ",did > 0);
+
+        return did;
+    }
+    
+    private final int _createGroup(int fid, String name) {
+        int gid = -1;
+        try {
+            gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Gcreate: " + err);
+        }
+        assertTrue("TestH5R._createGroup: ",gid > 0);
+
+        return gid;
+    }
+
+    @Before
+    public void createH5file()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+
+        try {
+            H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+            H5dsid = H5.H5Screate_simple(2, H5dims, null);
+            H5gid = _createGroup(H5fid, "Group1");
+            H5did2 = _createDataset(H5gid, H5dsid, "dset2", HDF5Constants.H5P_DEFAULT);
+            H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+          
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5D.createH5file: " + err);
+        }
+        assertTrue("TestH5D.createH5file: H5.H5Fcreate: ",H5fid > 0);
+        assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+        assertTrue("TestH5D.createH5file: _createDataset: ",H5did > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5dsid > 0) 
+            try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+        if (H5did > 0) 
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}      
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        if (H5gid > 0) 
+            try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+        if (H5did2 > 0) 
+            try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+ 
+        _deleteFile(H5_FILE);
+    }
+
+    @Test
+    public void testH5Rget_name() {
+        int loc_id=H5fid;
+        int ref_type=HDF5Constants.H5R_OBJECT;
+        long ret_val=-1;
+        byte[] ref=null;
+        String[] name= {""};
+        String objName = "/dset";
+        
+        try {
+            ref = H5.H5Rcreate(H5fid, objName, ref_type, -1);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Rget_name:H5Rcreate " + err);
+        }
+        
+        try {
+            ret_val = H5.H5Rget_name(loc_id, ref_type, ref, name, 16);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Rget_name: " + err);
+        }
+        
+        assertTrue("testH5Rget_name: H5Rget_name", ret_val>0);
+        assertTrue("The name of the object: ", objName.equals(name[0]));
+    }
+    
+    @Test
+    public void testH5Rget_obj_type2() {
+        int  ref_type=HDF5Constants.H5R_OBJECT;
+        byte[] ref=null;
+
+        String objName = "/dset";
+        int obj_type = -1;
+        int[] otype = { 1 };
+
+        try {
+            ref = H5.H5Rcreate(H5fid, objName, ref_type, -1);
+        }
+        catch(Throwable err) {
+            err.printStackTrace();
+        }
+
+        try {
+            obj_type = H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_OBJECT, ref, otype);
+        } 
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Rget_obj_type2: " + err);
+        }
+        assertEquals(obj_type, HDF5Constants.H5O_TYPE_DATASET);        
+    }
+
+    @Test
+    public void testH5Rcreate_refobj() {
+        byte[] ref = null;
+
+        try {
+            ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Rcreate: " + err);
+        }
+        assertNotNull(ref);      
+    }
+    
+    @Test
+    public void testH5Rcreate_regionrefobj() {
+        byte[] ref = null;
+        try {
+            ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Rcreate: " + err);
+        }
+        assertNotNull(ref);
+    }
+    
+    @Test
+    public void testH5Rdereference() {
+        byte[] ref1 = null;
+        byte[] ref2 = null;
+        int dataset_id = -1;
+        int group_id = -1;
+        try {
+            //Create reference on dataset 
+            ref1 = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+            dataset_id= H5.H5Rdereference(H5fid, HDF5Constants.H5R_DATASET_REGION, ref1);
+            
+            //Create reference on group
+            ref2 = H5.H5Rcreate(H5gid, "/Group1", HDF5Constants.H5R_OBJECT, -1);           
+            group_id= H5.H5Rdereference(H5gid, HDF5Constants.H5R_OBJECT, ref2);
+            assertNotNull(ref1);
+            assertNotNull(ref2);
+            assertTrue(dataset_id>=0);
+            assertTrue(group_id>=0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5Rdereference " + err);
+        }
+        finally {
+            try {H5.H5Dclose(dataset_id);} catch (Exception ex) {}
+            try {H5.H5Gclose(group_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Rget_region() {
+        byte[] ref = null;
+        int dsid = -1;
+        try {
+            ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+            dsid = H5.H5Rget_region(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+            assertNotNull(ref);
+            assertTrue(dsid>=0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5Rget_region: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(dsid);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Rget_name_Invalidreftype() throws Throwable {
+        byte[] ref = null;
+        String[] name= {""};
+        ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+        H5.H5Rget_name(H5fid, HDF5Constants.H5R_DATASET_REGION, ref, name, 16);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Rget_name_NULLreference() throws Throwable {
+        byte[] ref = null;
+        String[] name= {""};     
+        H5.H5Rget_name(H5fid, HDF5Constants.H5R_OBJECT, ref, name, 16);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Rget_obj_type2_Invalidreftype() throws Throwable {
+        byte[] ref = null;
+        int[] otype = { 1 };
+        ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+        H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_DATASET_REGION, ref, otype);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Rcreate_InvalidObjectName() throws Throwable {
+         H5.H5Rcreate(H5fid, "/GROUPS", HDF5Constants.H5R_OBJECT, -1);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Rcreate_Invalidspace_id() throws Throwable {
+         H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, -1);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Rcreate_Invalidreftype() throws Throwable {
+        H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_BADTYPE, -1);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Rgetregion_Invalidreftype() throws Throwable {
+        byte[] ref = null;
+        ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, H5dsid);
+        H5.H5Rget_region(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Rgetregion_Badreferencetype() throws Throwable {
+        byte[] ref = null;
+        ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, H5dsid);
+        H5.H5Rget_region(H5fid, HDF5Constants.H5R_OBJECT, ref);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Rgetregion_Nullreference() throws Throwable {
+        byte[] ref = null;
+        H5.H5Rget_region(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+    }
+    
+    @Test(expected = NullPointerException.class)
+    public void testH5Rdereference_Nullreference() throws Throwable {
+        byte[] ref = null;
+        H5.H5Rdereference(H5did2, HDF5Constants.H5R_OBJECT, ref);
+    }
+    
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Rdereference_Invalidreference() throws Throwable {
+        byte[] ref1 = null;
+        byte[] ref2 = null;
+        ref1 = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+        ref2 = H5.H5Rcreate(H5gid, "/Group1", HDF5Constants.H5R_OBJECT, -1);
+        H5.H5Rdereference(H5gid, HDF5Constants.H5R_OBJECT, ref1);
+    }
+  
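+    // Illustrative sketch (not invoked by the suite): the object-reference
+    // round trip exercised above - create a reference, dereference it, and
+    // recover the referenced object's name.
+    private void exampleObjectReference() throws Exception {
+        byte[] ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+        int oid = H5.H5Rdereference(H5fid, HDF5Constants.H5R_OBJECT, ref);
+        try {
+            String[] name = {""};
+            // on success name[0] holds "/dset"
+            H5.H5Rget_name(H5fid, HDF5Constants.H5R_OBJECT, ref, name, 16);
+        }
+        finally {
+            try {H5.H5Dclose(oid);} catch (Exception ex) {}
+        }
+    }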
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5S.java b/sourceTest/java/test/hdf5lib/TestH5S.java
new file mode 100644
index 0000000..58a99d6
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5S.java
@@ -0,0 +1,555 @@
+/**
+ * 
+ */
+package test.hdf5lib;
+
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5S {
+    int H5sid = -1;
+    int H5rank = 2;
+    long H5dims[] = {5, 5};
+    long H5maxdims[] = {10, 10};
+
+    @Before
+    public void createH5Dataspace()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
+
+        H5sid = H5.H5Screate_simple(H5rank, H5dims, H5maxdims);
+        assertTrue("H5.H5Screate_simple_extent", H5sid > 0);
+    }
+
+    @After
+    public void deleteH5Dataspace() throws HDF5LibraryException {
+        if (H5sid > 0) {
+            try {H5.H5Sclose(H5sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sget_simple_extent_ndims() {
+        int read_rank = -1;
+        try {
+            read_rank = H5.H5Sget_simple_extent_ndims(H5sid);
+            assertTrue("H5.H5Sget_simple_extent_ndims", H5rank == read_rank);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_simple_extent_ndims: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_simple_extent_dims_null() {
+        int read_rank = -1;
+        
+        try {
+            read_rank = H5.H5Sget_simple_extent_dims(H5sid, null, null);
+            assertTrue("H5.H5Sget_simple_extent_dims", H5rank == read_rank);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_simple_extent_dims: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_simple_extent_dims() {
+        int read_rank = -1;
+        long dims[] = {5, 5};
+        long maxdims[] = {10, 10};
+        
+        try {
+            read_rank = H5.H5Sget_simple_extent_dims(H5sid, dims, maxdims);
+            assertTrue("H5.H5Sget_simple_extent_dims", H5rank == read_rank);
+            assertTrue("H5.H5Sget_simple_extent_dims:dims", H5dims[0] == dims[0]);
+            assertTrue("H5.H5Sget_simple_extent_dims:maxdims", H5maxdims[0] == maxdims[0]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_simple_extent_dims: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_simple_extent_npoints() {
+        long num_elements = -1;
+        try {
+            num_elements = H5.H5Sget_simple_extent_npoints(H5sid);
+            assertTrue("H5.H5Sget_simple_extent_npoints", (H5dims[0]*H5dims[1]) == num_elements);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_simple_extent_npoints: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_simple_extent_type() {
+        int read_type = -1;
+        try {
+            read_type = H5.H5Sget_simple_extent_type(H5sid);
+            assertTrue("H5.H5Sget_simple_extent_type", HDF5Constants.H5S_SIMPLE == read_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_simple_extent_type: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sis_simple() {
+        boolean result = false;
+        
+        try {
+            result = H5.H5Sis_simple(H5sid);
+            assertTrue("H5.H5Sis_simple", result);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sis_simple: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sset_extent_simple() {
+        long num_elements = -1;
+        try {
+            H5.H5Sset_extent_simple(H5sid, H5rank, H5maxdims, H5maxdims);
+            num_elements = H5.H5Sget_simple_extent_npoints(H5sid);
+            assertTrue("H5.H5Sget_simple_extent_npoints", (H5maxdims[0]*H5maxdims[1]) == num_elements);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sset_extent_simple: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_select_type() {
+        int read_type = -1;
+        try {
+            read_type = H5.H5Sget_select_type(H5sid);
+            assertTrue("H5.H5Sget_select_type", HDF5Constants.H5S_SEL_ALL == read_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sset_extent_none: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sset_extent_none() {
+        int read_type = -1;
+        try {
+            H5.H5Sset_extent_none(H5sid);
+            read_type = H5.H5Sget_simple_extent_type(H5sid);
+            assertTrue("H5.H5Sget_simple_extent_type: "+read_type, HDF5Constants.H5S_NO_CLASS == read_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sset_extent_none: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Scopy() {
+        int sid = -1;
+        int read_rank = -1;
+
+        try {
+            sid = H5.H5Scopy(H5sid);
+            assertTrue("H5.H5Sis_simple", sid > 0);
+            read_rank = H5.H5Sget_simple_extent_ndims(sid);
+            assertTrue("H5.H5Screate_simple_extent_ndims", H5rank == read_rank);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Scopy: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sextent_copy() {
+        int sid = -1;
+        int class_type = -1;
+        
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+            assertTrue("H5.H5Screate_null", sid > 0);
+            H5.H5Sextent_copy(sid, H5sid);
+            class_type = H5.H5Sget_simple_extent_type(sid);
+            assertTrue("H5.H5Screate_null: type", class_type == HDF5Constants.H5S_SIMPLE);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sextent_copy: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sextent_equal() {
+        int sid = -1;
+        boolean result = false;
+        
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+            assertTrue("H5.H5Screate_null",sid > 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate: null " + err);
+        }
+        
+        try {
+            result = H5.H5Sextent_equal(sid, H5sid);
+            assertFalse("H5.testH5Sextent_equal",result);
+            H5.H5Sextent_copy(sid, H5sid);
+            result = H5.H5Sextent_equal(sid, H5sid);
+            assertTrue("H5.testH5Sextent_equal", result);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sextent_copy " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sencode_decode_null_dataspace() {
+        int sid = -1;
+        int decoded_sid = -1;
+        byte[] null_sbuf = null;
+        boolean result = false;
+        
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+            assertTrue("H5.H5Screate_null", sid > 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate: null " + err);
+        }
+        
+        try {
+            null_sbuf = H5.H5Sencode(sid);
+            assertFalse("H5.testH5Sencode", null_sbuf==null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sencode " + err);
+        }
+        finally {
+            if(null_sbuf == null) {
+                try {H5.H5Sclose(sid);} catch (Exception ex) {}
+            }
+        }
+        
+        try {
+            decoded_sid = H5.H5Sdecode(null_sbuf);
+            assertTrue("H5.testH5Sdecode", decoded_sid>0);
+
+            result = H5.H5Sextent_equal(sid, decoded_sid);
+            assertTrue("H5.testH5Sextent_equal", result);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sdecode " + err);
+        }
+        finally {
+            try {H5.H5Sclose(decoded_sid);} catch (Exception ex) {}
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sencode_decode_scalar_dataspace() {
+        int sid = -1;
+        int decoded_sid = -1;
+        byte[] scalar_sbuf = null;
+        boolean result = false;
+        int iresult = -1;
+        long lresult = -1;
+        
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+            assertTrue("H5.H5Screate_null", sid > 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate: null " + err);
+        }
+        
+        try {
+            scalar_sbuf = H5.H5Sencode(sid);
+            assertFalse("H5.testH5Sencode", scalar_sbuf==null);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sencode " + err);
+        }
+        finally {
+            if(scalar_sbuf == null) {
+                try {H5.H5Sclose(sid);} catch (Exception ex) {}
+            }
+        }
+        
+        try {
+            decoded_sid = H5.H5Sdecode(scalar_sbuf);
+            assertTrue("H5.testH5Sdecode", decoded_sid>0);
+    
+            result = H5.H5Sextent_equal(sid, decoded_sid);
+            assertTrue("H5.testH5Sextent_equal", result);
+            
+            /* Verify decoded dataspace */
+            lresult = H5.H5Sget_simple_extent_npoints(decoded_sid);
+            assertTrue("H5.testH5Sget_simple_extent_npoints", lresult==1);
+    
+            iresult = H5.H5Sget_simple_extent_ndims(decoded_sid);
+            assertTrue("H5.testH5Sget_simple_extent_ndims", iresult==0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sdecode " + err);
+        }
+        finally {
+            try {H5.H5Sclose(decoded_sid);} catch (Exception ex) {}
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sselect_none() {
+        int read_type = -1;
+        try {
+            H5.H5Sselect_none(H5sid);
+            read_type = H5.H5Sget_select_type(H5sid);
+            assertTrue("H5.H5Sget_select_type: "+read_type, HDF5Constants.H5S_SEL_NONE == read_type);
+            H5.H5Sselect_all(H5sid);
+            read_type = H5.H5Sget_select_type(H5sid);
+            assertTrue("H5.H5Sget_select_type: "+read_type, HDF5Constants.H5S_SEL_ALL == read_type);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sset_extent_none: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_select_npoints() {
+        long coord[][] = {{0,1},{2,4},{5,6}}; /* Coordinates for point selection */
+        long num_elements = -1;
+        try {
+            H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 3, coord);
+            num_elements = H5.H5Sget_select_npoints(H5sid);
+            assertTrue("H5.H5Sget_select_npoints: "+num_elements, 3 == num_elements);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_select_npoints: " + err);
+        }
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Sget_select_elem_pointlist_invalid() throws Throwable {
+        long coord[][] = {{0,1},{2,4},{5,6}}; /* Coordinates for point selection */
+        long getcoord[] = {-1,-1}; /* Coordinates for get point selection */
+        try {
+            H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 3, coord);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_select_elem_pointlist: " + err);
+        }
+        H5.H5Sget_select_elem_pointlist(H5sid, 0, 3, getcoord);
+    }
+
+    @Test
+    public void testH5Sget_select_elem_pointlist() {
+        long coord[][] = {{0,1},{2,3},{4,5}}; /* Coordinates for point selection */
+        long getcoord[] = {-1,-1,-1,-1,-1,-1}; /* Coordinates for get point selection */
+        try {
+            H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 3, coord);
+            H5.H5Sget_select_elem_pointlist(H5sid, 0, 3, getcoord);
+            assertTrue("H5.H5Sget_select_elem_pointlist", coord[0][0] == getcoord[0]);
+            assertTrue("H5.H5Sget_select_elem_pointlist", coord[0][1] == getcoord[1]);
+            assertTrue("H5.H5Sget_select_elem_pointlist", coord[1][0] == getcoord[2]);
+            assertTrue("H5.H5Sget_select_elem_pointlist", coord[1][1] == getcoord[3]);
+            assertTrue("H5.H5Sget_select_elem_pointlist", coord[2][0] == getcoord[4]);
+            assertTrue("H5.H5Sget_select_elem_pointlist", coord[2][1] == getcoord[5]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_select_elem_pointlist: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_select_bounds() {
+        long lowbounds[] = {-1,-1}; 
+        long hibounds[] = {-1,-1}; 
+        try {
+            H5.H5Sget_select_bounds(H5sid, lowbounds, hibounds);
+            assertTrue("H5.H5Sget_select_bounds", 0 == lowbounds[0]);
+            assertTrue("H5.H5Sget_select_bounds", 0 == lowbounds[1]);
+            assertTrue("H5.H5Sget_select_bounds", (H5dims[0]-1) == hibounds[0]);
+            assertTrue("H5.H5Sget_select_bounds", (H5dims[1]-1) == hibounds[1]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_select_bounds: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Soffset_simple() {
+        long coord[][] = {{2,2},{2,4},{4,2},{4,4}}; /* Coordinates for point selection */
+        long lowbounds[] = {-1,-1}; 
+        long hibounds[] = {-1,-1}; 
+        try {
+            H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 4, coord);
+            H5.H5Sget_select_bounds(H5sid, lowbounds, hibounds);
+            assertTrue("H5.H5Sget_select_bounds", 2 == lowbounds[0]);
+            assertTrue("H5.H5Sget_select_bounds", 2 == lowbounds[1]);
+            assertTrue("H5.H5Sget_select_bounds", (H5dims[0]-1) == hibounds[0]);
+            assertTrue("H5.H5Sget_select_bounds", (H5dims[1]-1) == hibounds[1]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_select_bounds: " + err);
+        }
+        try {
+            long offset[] = {-1,-1};
+            H5.H5Soffset_simple(H5sid, offset);
+            H5.H5Sget_select_bounds(H5sid, lowbounds, hibounds);
+            assertTrue("H5.H5Sget_select_bounds", 1 == lowbounds[0]);
+            assertTrue("H5.H5Sget_select_bounds", 1 == lowbounds[1]);
+            assertTrue("H5.H5Sget_select_bounds", (H5dims[0]-2) == hibounds[0]);
+            assertTrue("H5.H5Sget_select_bounds", (H5dims[1]-2) == hibounds[1]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Soffset_simple: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Sget_select_hyper() {
+        int space1 = -1;
+        long start[] = {0,0}; 
+        long stride[] = {1,1}; 
+        long count[] = {1,1}; 
+        long block[] = {4,4}; 
+        long nblocks;   // Number of hyperslab blocks 
+        long blocks[] = {-1, -1, -1, -1, -1, -1, -1, -1};    // List of blocks
+        try {
+            // Copy "all" selection & space
+            space1 = H5.H5Scopy(H5sid);
+            assertTrue("H5.H5Scopy", H5sid > 0);
+            // 'AND' "all" selection with another hyperslab
+            H5.H5Sselect_hyperslab(space1, HDF5Constants.H5S_SELECT_AND, start, stride, count, block);
+    
+            // Verify that there is only one block
+            nblocks = H5.H5Sget_select_hyper_nblocks(space1);
+            assertTrue("H5Sget_select_hyper_nblocks", nblocks == 1);
+    
+            // Retrieve the block defined
+            H5.H5Sget_select_hyper_blocklist(space1, 0, nblocks, blocks);
+    
+            // Verify that the correct block is defined 
+            assertTrue("H5.H5Sget_select_hyper_blocklist", start[0] == blocks[0]);
+            assertTrue("H5.H5Sget_select_hyper_blocklist", start[1] == blocks[1]);
+            assertTrue("H5.H5Sget_select_hyper_blocklist", (block[0]-1) == blocks[2]);
+            assertTrue("H5.H5Sget_select_hyper_blocklist", (block[1]-1) == blocks[3]);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Sget_select_bounds: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(space1);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Sget_select_valid() {
+        int space1 = -1;
+        long start[] = {1,0}; 
+        long stride[] = {1,1}; 
+        long count[] = {2,3}; 
+        long block[] = {1,1}; 
+        long offset[] = {0,0};    // Offset of selection
+
+        try {
+            // Copy "all" selection & space
+            space1 = H5.H5Scopy(H5sid);
+            assertTrue("H5.H5Scopy", H5sid > 0);
+            // 'SET' a new hyperslab selection, replacing the copied "all" selection
+            H5.H5Sselect_hyperslab(space1, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+    
+            // Check a valid offset
+            offset[0]=-1; 
+            offset[1]=0;
+            H5.H5Soffset_simple(space1, offset);
+            assertTrue("H5Sselect_valid", H5.H5Sselect_valid(space1));
+    
+            // Check an invalid offset
+            offset[0]=10;
+            offset[1]=0;
+            H5.H5Soffset_simple(space1, offset);
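+            // An offset of 10 pushes the hyperslab beyond the dataspace extent, invalidating the selection.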
+            assertFalse("H5Sselect_valid", H5.H5Sselect_valid(space1));
+
+            /* Reset offset */
+            offset[0]=0;
+            offset[1]=0;
+            H5.H5Soffset_simple(space1, offset);
+            assertTrue("H5Sselect_valid", H5.H5Sselect_valid(space1));
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Sget_select_valid: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(space1);} catch (Exception ex) {}
+        }
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Sbasic.java b/sourceTest/java/test/hdf5lib/TestH5Sbasic.java
new file mode 100644
index 0000000..ffbd6f3
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Sbasic.java
@@ -0,0 +1,221 @@
+/**
+ * 
+ */
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5Sbasic {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Screate_invalid() throws Throwable {
+        H5.H5Screate(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Sget_simple_extent_type_invalid() throws Throwable {
+        H5.H5Sget_simple_extent_type(-1);
+    }
+
+    @Test
+    public void testH5Screate_scalar() {
+        int sid = -1;
+        int class_type = -1;
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+            assertTrue("H5.H5Screate_scalar",sid > 0);
+            class_type = H5.H5Sget_simple_extent_type(sid);
+            assertTrue("H5.H5Screate_scalar: type",class_type == HDF5Constants.H5S_SCALAR);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Screate_null() {
+        int sid = -1;
+        int class_type = -1;
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+            assertTrue("H5.H5Screate_null", sid > 0);
+            class_type = H5.H5Sget_simple_extent_type(sid);
+            assertTrue("H5.H5Screate_null: type", class_type == HDF5Constants.H5S_NULL);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Screate_simple_dims_null() throws Throwable {
+        H5.H5Screate_simple(2, (long[])null, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Screate_simple_rank_invalid() throws Throwable {
+        long dims[] = {5, 5};
+        H5.H5Screate_simple(-1, dims, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Screate_simple_dims_invalid() throws Throwable {
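+        // A rank of 5 exceeds the 2-element dims array, which is rejected with an IllegalArgumentException.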
+        long dims[] = {2, 2};
+        H5.H5Screate_simple(5, dims, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Screate_simple_dims_exceed() throws Throwable {
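+        // HDF5 caps dataspace rank at H5S_MAX_RANK (32), so a rank of 35 must fail.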
+        long dims[] = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,
+                21,22,23,24,25,26,27,28,29,30,31,32,33,35};
+        H5.H5Screate_simple(35, dims, null);
+    }
+
+//H5Screate_simple was changed to allow a dim of 0
+//    @Ignore(expected = HDF5LibraryException.class)
+//    public void testH5Screate_simple_dims_zero() {
+//        long dims[] = {0, 0};
+//        H5.H5Screate_simple(2, dims, null);
+//    }
+
+    @Test
+    public void testH5Screate_simple() {
+        int sid = -1;
+        int class_type = -1;
+        int rank = 2;
+        long dims[] = {5, 5};
+        long maxdims[] = {10, 10};
+        
+        try {
+            sid = H5.H5Screate_simple(rank, dims, maxdims);
+            assertTrue("H5.H5Screate_simple", sid > 0);
+            class_type = H5.H5Sget_simple_extent_type(sid);
+            assertTrue("H5.H5Screate_simple: type", class_type == HDF5Constants.H5S_SIMPLE);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate_simple: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Screate_simple_unlimited() {
+        int sid = -1;
+        int class_type = -1;
+        int rank = 2;
+        long dims[] = {5, 5};
+        long maxdims[] = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+        
+        try {
+            sid = H5.H5Screate_simple(rank, dims, maxdims);
+            assertTrue("H5.H5Screate_simple", sid > 0);
+            class_type = H5.H5Sget_simple_extent_type(sid);
+            assertTrue("H5.H5Screate_simple: type", class_type == HDF5Constants.H5S_SIMPLE);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate_simple: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Screate_simple_unlimited_1d() {
+        int sid = -1;
+        int class_type = -1;
+        int rank = 1;
+        long dims[] = {5};
+        long maxdims[] = {HDF5Constants.H5S_UNLIMITED};
+        
+        try {
+            sid = H5.H5Screate_simple(rank, dims, maxdims);
+            assertTrue("H5.H5Screate_simple", sid > 0);
+            class_type = H5.H5Sget_simple_extent_type(sid);
+            assertTrue("H5.H5Screate_simple: type", class_type == HDF5Constants.H5S_SIMPLE);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate_simple: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }   
+
+    @Test
+    public void testH5Screate_simple_max_default() {
+        int sid = -1;
+        int rank = 2;
+        long dims[] = {5, 5};
+        
+        try {
+            sid = H5.H5Screate_simple(rank, dims, null);
+            assertTrue("H5.H5Screate_simple_max_default", sid > 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate_simple: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Screate_simple_extent() {
+        int sid = -1;
+        int rank = 2;
+        long dims[] = {5, 5};
+        long maxdims[] = {10, 10};
+        
+        try {
+            sid = H5.H5Screate(HDF5Constants.H5S_SIMPLE);
+            assertTrue("H5.H5Screate_simple_extent",sid > 0);
+            H5.H5Sset_extent_simple(sid, rank, dims, maxdims);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Screate: " + err);
+        }
+        finally {
+            try {H5.H5Sclose(sid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Sencode_invalid() throws Throwable {
+        H5.H5Sencode(-1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Sdecode_null() throws Throwable {
+        H5.H5Sdecode(null);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5T.java b/sourceTest/java/test/hdf5lib/TestH5T.java
new file mode 100644
index 0000000..e294363
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5T.java
@@ -0,0 +1,434 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("all")
+public class TestH5T {
+    private static final String H5_FILE = "test.h5";
+    int H5fid = -1;
+    int H5strdid = -1;
+
+    private final void _deleteFile(String filename) {
+        File file = null;
+        try {
+            file = new File(filename);
+        } 
+        catch (Throwable err) {}
+
+        if (file.exists()) {
+            try {file.delete();} catch (SecurityException e) {}
+        }
+    }
+
+    @Before
+    public void createH5file() throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
+
+        H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        assertTrue("H5.H5Fcreate", H5fid > 0);
+        H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+        assertTrue("H5.H5Tcopy", H5strdid > 0);
+
+        H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+    }
+
+    @After
+    public void deleteH5file() throws HDF5LibraryException {
+        if (H5strdid >= 0)
+            try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+        if (H5fid > 0) 
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+        _deleteFile(H5_FILE);
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tequal_type_error() throws Throwable {
+        H5.H5Tequal(HDF5Constants.H5T_INTEGER, H5strdid);
+    }
+    
+    @Test
+    public void testH5Tget_class() {
+        try {
+            int result = H5.H5Tget_class(H5strdid);
+            assertTrue("H5.H5Tget_class", result > 0);
+            String class_name = H5.H5Tget_class_name(result);
+            assertTrue("H5.H5Tget_class", class_name.compareTo("H5T_STRING")==0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tget_class: " + err);
+        }
+    }
+    
+    @Test
+    public void testH5Tget_size() {
+        long dt_size = -1; 
+        
+        try {
+            dt_size = H5.H5Tget_size(H5strdid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tget_size:H5.H5Tget_size " + err);
+        }
+        assertTrue("testH5Tget_size", dt_size > 0);
+    }
+    
+    @Test
+    public void testH5Tset_size() {
+        long dt_size = 5; 
+        
+        try {
+            H5.H5Tset_size(H5strdid, dt_size);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tset_size:H5.H5Tset_size " + err);
+        }
+        try {
+            dt_size = H5.H5Tget_size(H5strdid);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tget_size:H5.H5Tget_size " + err);
+        }
+        assertTrue("testH5Tget_size", dt_size == 5);
+    }
+    
+    @Test
+    public void testH5Tarray_create() {
+       int filetype_id = -1;
+       long[] adims = { 3, 5 };
+
+       try {
+           filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, 2, adims);
+           assertTrue("testH5Tarray_create", filetype_id > 0);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tarray_create.H5Tarray_create " + err);
+       }
+       finally {
+           if (filetype_id >= 0)
+               try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+       }
+    }
+    
+    @Test
+    public void testH5Tget_array_ndims() {
+       int filetype_id = -1;
+       int ndims = 0;
+       long[] adims = { 3, 5 };
+
+       try {
+           filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, 2, adims);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tarray_create.H5Tarray_create " + err);
+       }
+       assertTrue("testH5Tget_array_ndims:H5Tarray_create", filetype_id > 0);
+       try {
+           ndims = H5.H5Tget_array_ndims(filetype_id);
+           assertTrue("testH5Tget_array_ndims", ndims == 2);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tget_array_ndims.H5Tget_array_ndims " + err);
+       }
+       finally {
+           if (filetype_id >= 0)
+               try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+       }
+    }
+    
+    @Test
+    public void testH5Tget_array_dims() {
+       int filetype_id = -1;
+       int ndims = 0;
+       long[] adims = { 3, 5 };
+       long[] rdims = new long[2];
+
+       try {
+           filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, 2, adims);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tarray_create.H5Tarray_create " + err);
+       }
+       assertTrue("testH5Tget_array_dims:H5Tarray_create", filetype_id > 0);
+       try {
+           ndims = H5.H5Tget_array_dims(filetype_id, rdims);
+           assertTrue("testH5Tget_array_dims", ndims == 2);
+           assertTrue("testH5Tget_array_dims", adims[0] == rdims[0]);
+           assertTrue("testH5Tget_array_dims", adims[1] == rdims[1]);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tget_array_dims.H5Tget_array_dims " + err);
+       }
+       finally {
+           if (filetype_id >= 0)
+               try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+       }
+    }
+    
+    @Test
+    public void testH5Tenum_functions() {
+        int       filetype_id =-1;
+        String    enum_type ="Enum_type";
+        byte[]    enum_val = new byte[1];
+        String    enum_name = null;
+
+        // Create an enumeration datatype
+        try {
+            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_ENUM, (long)1);
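+            // The 1-byte base size means each member value fits in the single-byte enum_val buffer.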
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tenum_functions:H5Tcreate " + err);
+        }
+        assertTrue("testH5Tenum_functions:H5Tcreate", filetype_id > 0);
+        try {
+            enum_val[0]=10;
+            H5.H5Tenum_insert(filetype_id, "RED", enum_val);
+            enum_val[0]=11;
+            H5.H5Tenum_insert(filetype_id, "GREEN", enum_val);
+            enum_val[0]=12;
+            H5.H5Tenum_insert(filetype_id, "BLUE", enum_val);
+            enum_val[0]=13;
+            H5.H5Tenum_insert(filetype_id, "ORANGE", enum_val);
+            enum_val[0]=14;
+            H5.H5Tenum_insert(filetype_id, "YELLOW", enum_val);
+
+            // Query member number and member index by member name, for enumeration type.
+            assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 5);
+            assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "ORANGE") == 3);
+
+            // Commit the enumeration datatype and close it
+            H5.H5Tcommit(H5fid, enum_type, filetype_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+            H5.H5Tclose(filetype_id);
+
+            // Open the datatype for query
+            filetype_id = H5.H5Topen(H5fid, enum_type, HDF5Constants.H5P_DEFAULT);
+            assertTrue("testH5Tenum_functions:H5Tcreate", filetype_id > 0);
+
+            // Query member number and member index by member name, for enumeration type
+            assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 5);
+            assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "ORANGE") == 3);
+    
+            // Query member value by member name, for enumeration type
+            H5.H5Tenum_valueof (filetype_id, "ORANGE", enum_val);
+            assertTrue("Incorrect value for enum member", enum_val[0]==13);
+    
+            // Query member value by member index, for enumeration type
+            H5.H5Tget_member_value (filetype_id, 2, enum_val);
+            assertTrue("Incorrect value for enum member", enum_val[0]==12);
+    
+            // Query member name by member value, for enumeration type
+            enum_val[0] = 14;
+            enum_name = H5.H5Tenum_nameof(filetype_id, enum_val, 16);
+            assertTrue("Incorrect name for enum member", enum_name.compareTo("YELLOW")==0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tenum_functions:query " + err);
+        }
+        finally {
+            if (filetype_id >= 0)
+                try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Tenum_create_functions() {
+        int       filetype_id = -1;
+        byte[]    enum_val = new byte[1];
+
+        // Create an enumeration datatype
+        try {
+            filetype_id = H5.H5Tenum_create(HDF5Constants.H5T_NATIVE_INT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tenum_create_functions:H5Tcreate " + err);
+        }
+        assertTrue("testH5Tenum_create_functions:H5Tcreate", filetype_id > 0);
+        try {
+            enum_val[0]=10;
+            H5.H5Tenum_insert(filetype_id, "RED", enum_val);
+            enum_val[0]=11;
+            H5.H5Tenum_insert(filetype_id, "GREEN", enum_val);
+            enum_val[0]=12;
+            H5.H5Tenum_insert(filetype_id, "BLUE", enum_val);
+            enum_val[0]=13;
+            H5.H5Tenum_insert(filetype_id, "ORANGE", enum_val);
+            enum_val[0]=14;
+            H5.H5Tenum_insert(filetype_id, "YELLOW", enum_val);
+
+            // Query member number and member index by member name, for enumeration type.
+            assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 5);
+            assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "ORANGE") == 3);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tenum_create_functions:H5Tget_nmembers " + err);
+        }
+        finally {
+            if (filetype_id >= 0)
+                try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Topaque_functions() {
+        int       filetype_id = -1;
+        String    opaque_name = null;
+
+        // Create an opaque datatype
+        try {
+            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)4);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Topaque_functions:H5Tcreate " + err);
+        }
+        assertTrue("testH5Topaque_functions:H5Tcreate", filetype_id > 0);
+
+        try {
+            H5.H5Tset_tag(filetype_id, "opaque type");
+            opaque_name = H5.H5Tget_tag(filetype_id);
+            assertTrue("Incorrect tag for opaque type", opaque_name.compareTo("opaque type")==0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Topaque_functions:H5Tset_get_tag " + err);
+        }
+        finally {
+            if (filetype_id >= 0)
+                try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Tvlen_create() {
+       int filetype_id = -1;
+
+       try {
+           filetype_id = H5.H5Tvlen_create(HDF5Constants.H5T_C_S1);
+           assertTrue("testH5Tvlen_create", filetype_id > 0);
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tvlen_create.H5Tvlen_create " + err);
+       }
+       finally {
+           if (filetype_id >= 0)
+               try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+       }
+    }
+    
+    @Test
+    public void testH5Tis_variable_str() {
+       int filetype_id = -1;
+
+       try {
+           filetype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+           assertTrue("testH5Tis_variable_str", filetype_id > 0);
+
+           // Convert to variable-length string
+           H5.H5Tset_size(filetype_id, HDF5Constants.H5T_VARIABLE);
+
+           // Check if datatype is VL string
+           int vlclass = H5.H5Tget_class(filetype_id);
+           assertTrue("testH5Tis_variable_str:H5Tget_class", vlclass == HDF5Constants.H5T_STRING);
+           
+           assertTrue("testH5Tis_variable_str:H5Tis_variable_str", H5.H5Tis_variable_str(filetype_id));
+
+           // Verify that the class detects as a string
+           assertTrue("testH5Tis_variable_str:H5Tdetect_class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_STRING));
+       }
+       catch (Throwable err) {
+           err.printStackTrace();
+           fail("testH5Tis_variable_str.H5Tis_variable_str " + err);
+       }
+       finally {
+           if (filetype_id >= 0)
+               try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+       }
+    }
+    
+    @Test
+    public void testH5Tcompound_functions() {
+        int       filetype_id =-1;
+
+        // Create a compound datatype
+        try {
+            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, (long)16);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tcompound_functions:H5Tcreate " + err);
+        }
+        assertTrue("testH5Tcompound_functions:H5Tcreate", filetype_id > 0);
+        try {
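+            // Two 8-byte native doubles packed into the 16-byte compound: "Lon" at offset 0, "Lat" at offset 8.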
+            H5.H5Tinsert(filetype_id, "Lon", 0, HDF5Constants.H5T_NATIVE_DOUBLE);
+            H5.H5Tinsert(filetype_id, "Lat", 8, HDF5Constants.H5T_NATIVE_DOUBLE);
+
+            // Query member number and member index by member name, for the compound type.
+            assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 2);
+            assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "Lat") == 1);
+
+            // We started to support this function for compound type in 1.8.6 release.
+            int order = H5.H5Tget_order(filetype_id);
+            assertFalse("Can't get order for compound type.", order == HDF5Constants.H5T_ORDER_ERROR);
+            assertTrue("Wrong order for this type.", (order == HDF5Constants.H5T_ORDER_LE) || (order == HDF5Constants.H5T_ORDER_BE));
+
+            // Make certain that the correct classes can be detected
+            assertTrue("Can't get correct class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_COMPOUND));
+            assertTrue("Can't get correct class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_FLOAT));
+            // Make certain that an incorrect class is not detected
+            assertFalse("Can get incorrect class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_TIME));
+            
+            // Query member name by member index
+            String index_name = H5.H5Tget_member_name (filetype_id, 0);
+            assertTrue("Incorrect name for member index", index_name.compareTo("Lon")==0);
+            
+            // Query member offset by member index
+            long index_offset = H5.H5Tget_member_offset (filetype_id, 1);
+            assertTrue("Incorrect offset for member no", index_offset == 8);
+            
+            // Query member type by member index
+            int index_type = H5.H5Tget_member_type (filetype_id, 0);
+            assertTrue("Incorrect type for member index", H5.H5Tequal(HDF5Constants.H5T_NATIVE_DOUBLE, index_type));
+            if (index_type >= 0)
+                try {H5.H5Tclose(index_type);} catch (Exception ex) {}
+           
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tcompound_functions:query " + err);
+        }
+        finally {
+            if (filetype_id >= 0)
+                try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+        }
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Tbasic.java b/sourceTest/java/test/hdf5lib/TestH5Tbasic.java
new file mode 100644
index 0000000..f61616c
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Tbasic.java
@@ -0,0 +1,137 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Tbasic {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+    
+    @Test
+    public void testH5Tcopy() {
+        int H5strdid = -1;
+        try {
+            H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+            assertTrue("H5.H5Tcopy",H5strdid > 0);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tcopy: " + err);
+        }
+        finally {
+            if (H5strdid >= 0)
+                try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Tequal() {
+        int H5strdid = -1;
+        try {
+            H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+            assertTrue("H5.H5Tcopy",H5strdid > 0);
+            boolean teq = H5.H5Tequal(HDF5Constants.H5T_C_S1, H5strdid);
+            assertTrue("H5.H5Tequal",teq);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tequal: " + err);
+        }
+        finally {
+            if (H5strdid >= 0)
+                try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+        }
+    }
+
+    @Test
+    public void testH5Tequal_not() {
+        int H5strdid = -1;
+        try {
+            H5strdid = H5.H5Tcopy(HDF5Constants.H5T_STD_U64LE);
+            assertTrue("H5.H5Tcopy",H5strdid > 0);
+            boolean teq = H5.H5Tequal(HDF5Constants.H5T_IEEE_F32BE, H5strdid);
+            assertFalse("H5.H5Tequal",teq);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tequal_not: " + err);
+        }
+        finally {
+            if (H5strdid >= 0)
+                try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+        }
+    }
+    
+    @Test
+    public void testH5Tconvert() {
+        String[] strs = {"a1234","b1234"};
+        int srcLen = 5;
+        int dstLen = 10;
+        int srcId = -1;
+        int dstId = -1;
+        int dimSize = strs.length;
+        byte[]   buf = new byte[dimSize*dstLen];
+        
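+        // Pack the 5-byte source strings at srcLen strides; H5Tconvert converts buf in place
+        // to dstLen strides, so the buffer is sized for the larger destination type.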
+        for (int i=0; i<dimSize; i++)
+            System.arraycopy(strs[i].getBytes(), 0, buf, i*srcLen, 5);
+   
+        try {
+            srcId = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+            H5.H5Tset_size(srcId, srcLen);
+     
+            dstId = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+            H5.H5Tset_size(dstId, dstLen);
+     
+            H5.H5Tconvert(srcId, dstId, dimSize, buf, null, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Tconvert: " + err);
+        }
+        finally {
+            try {H5.H5Tclose(srcId);} catch (Exception ex) {}
+            try {H5.H5Tclose(dstId);} catch (Exception ex) {}
+        }
+        
+        for (int i=0; i<strs.length; i++) {
+            assertTrue((new String(buf, i*dstLen, dstLen)).startsWith(strs[i]));
+        }
+    }
+    
+    @Test
+    public void testH5Torder_size() {
+        int H5strdid = -1;
+        try {
+            // Fixed length string
+            H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+            assertTrue("H5.H5Tcopy",H5strdid > 0);
+            H5.H5Tset_size(H5strdid, (long)5);
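+            // Fixed-length character strings carry no byte order, so the order reports H5T_ORDER_NONE.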
+            assertTrue(HDF5Constants.H5T_ORDER_NONE == H5.H5Tget_order(H5strdid));
+            H5.H5Tset_order(H5strdid, HDF5Constants.H5T_ORDER_NONE);
+            assertTrue(HDF5Constants.H5T_ORDER_NONE == H5.H5Tget_order(H5strdid));
+            assertTrue(5 == H5.H5Tget_size(H5strdid));
+
+            // Variable length string
+            H5.H5Tset_size(H5strdid, HDF5Constants.H5T_VARIABLE);
+            H5.H5Tset_order(H5strdid, HDF5Constants.H5T_ORDER_BE);
+            assertTrue(HDF5Constants.H5T_ORDER_BE == H5.H5Tget_order(H5strdid));
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Torder: " + err);
+        }
+        finally {
+            if (H5strdid >= 0)
+                try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+        }
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Tparams.java b/sourceTest/java/test/hdf5lib/TestH5Tparams.java
new file mode 100644
index 0000000..bf30e7b
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Tparams.java
@@ -0,0 +1,364 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestH5Tparams {
+
+    @Before
+    public void checkOpenIDs() {
+        assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tcopy_invalid() throws Throwable {
+        H5.H5Tcopy(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tequal_invalid() throws Throwable {
+        H5.H5Tequal(-1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tlock_invalid() throws Throwable {
+        H5.H5Tlock(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_class_invalid() throws Throwable {
+        H5.H5Tget_class(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_size_invalid() throws Throwable {
+        H5.H5Tget_size(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_size_long_invalid() throws Throwable {
+        H5.H5Tget_size_long(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_size_invalid() throws Throwable {
+        H5.H5Tset_size(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_order_invalid() throws Throwable {
+        H5.H5Tget_order(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_order_invalid() throws Throwable {
+        H5.H5Tset_order(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_precision_invalid() throws Throwable {
+        H5.H5Tget_precision(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_precision_long_invalid() throws Throwable {
+        H5.H5Tget_precision_long(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_precision_invalid() throws Throwable {
+        H5.H5Tset_precision(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_offset_invalid() throws Throwable {
+        H5.H5Tget_offset(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_offset_invalid() throws Throwable {
+        H5.H5Tset_offset(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tcreate_invalid() throws Throwable {
+        H5.H5Tcreate(-1, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Topen_null() throws Throwable {
+        H5.H5Topen(-1, null, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Topen_invalid() throws Throwable {
+        H5.H5Topen(-1, "Bogus", 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tcommit_null() throws Throwable {
+        H5.H5Tcommit(-1, null, 0, -1, -1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tcommit_invalid() throws Throwable {
+        H5.H5Tcommit(-1, "Bogus", -1, -1, -1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tget_pad_null() throws Throwable {
+        H5.H5Tget_pad(-1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_pad_invalid() throws Throwable {
+        int[] pad = new int[2];
+        H5.H5Tget_pad(-1, pad);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_pad_invalid() throws Throwable {
+        H5.H5Tset_pad(-1, -1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_sign_invalid() throws Throwable {
+        H5.H5Tget_sign(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_sign_invalid() throws Throwable {
+        H5.H5Tset_sign(-1, 0);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tget_fields_null() throws Throwable {
+        H5.H5Tget_fields(-1, (long[])null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Tget_fields_length_invalid() throws Throwable {
+        long[] fields = new long[2];
+        H5.H5Tget_fields(-1, fields);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_fields_invalid() throws Throwable {
+        long[] fields = new long[5];
+        H5.H5Tget_fields(-1, fields);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_fields_invalid() throws Throwable {
+        H5.H5Tset_fields(-1, -1, -1, -1, -1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_ebias_invalid() throws Throwable {
+        H5.H5Tget_ebias(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_ebias_long_invalid() throws Throwable {
+        H5.H5Tget_ebias_long(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_ebias_invalid() throws Throwable {
+        H5.H5Tset_ebias(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_norm_invalid() throws Throwable {
+        H5.H5Tget_norm(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_norm_invalid() throws Throwable {
+        H5.H5Tset_norm(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_inpad_invalid() throws Throwable {
+        H5.H5Tget_inpad(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_inpad_invalid() throws Throwable {
+        H5.H5Tset_inpad(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_cset_invalid() throws Throwable {
+        H5.H5Tget_cset(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_cset_invalid() throws Throwable {
+        H5.H5Tset_cset(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_strpad_invalid() throws Throwable {
+        H5.H5Tget_strpad(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_strpad_invalid() throws Throwable {
+        H5.H5Tset_strpad(-1, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_nmembers_invalid() throws Throwable {
+        H5.H5Tget_nmembers(-1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tget_member_index_null() throws Throwable {
+        H5.H5Tget_member_index(-1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_member_index_invalid() throws Throwable {
+        H5.H5Tget_member_index(-1, "Bogus");
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_member_type_invalid() throws Throwable {
+        H5.H5Tget_member_type(-1, -1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_member_class_invalid() throws Throwable {
+        H5.H5Tget_member_class(-1, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tinsert_null() throws Throwable {
+        H5.H5Tinsert(-1, null, 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tinsert_invalid() throws Throwable {
+        H5.H5Tinsert(-1, "Bogus", 0, 0);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tpack_invalid() throws Throwable {
+        H5.H5Tpack(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tvlen_create_invalid() throws Throwable {
+        H5.H5Tvlen_create(-1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tset_tag_null() throws Throwable {
+        H5.H5Tset_tag(-1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tset_tag_invalid() throws Throwable {
+        H5.H5Tset_tag(-1, "Bogus");
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_super_invalid() throws Throwable {
+        H5.H5Tget_super(-1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tenum_create_invalid() throws Throwable {
+        H5.H5Tenum_create(-1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tenum_insert_name_null() throws Throwable {
+        H5.H5Tenum_insert(-1, null, (byte[])null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tenum_insert_null() throws Throwable {
+        H5.H5Tenum_insert(-1, "bogus", (byte[])null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tenum_insert_invalid() throws Throwable {
+        byte[] enumtype = new byte[2];
+        H5.H5Tenum_insert(-1, "bogus", enumtype);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Tenum_nameof_invalid_size() throws Throwable {
+        H5.H5Tenum_nameof(-1, null, -1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tenum_nameof_value_null() throws Throwable {
+        H5.H5Tenum_nameof(-1, null, 1);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tenum_nameof_invalid() throws Throwable {
+        byte[] btype = new byte[2];
+        H5.H5Tenum_nameof(-1, btype, 1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tenum_valueof_name_null() throws Throwable {
+        H5.H5Tenum_valueof(-1, null, (byte[])null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tenum_valueof_null() throws Throwable {
+        H5.H5Tenum_valueof(-1, "bogus", (byte[])null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tenum_valueof_invalid() throws Throwable {
+        byte[] btype = new byte[2];
+        H5.H5Tenum_valueof(-1, "bogus", btype);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tget_member_value_null() throws Throwable {
+        H5.H5Tget_member_value(-1, -1, (byte[])null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_member_value_invalid() throws Throwable {
+        byte[] btype = new byte[2];
+        H5.H5Tget_member_value(-1, -1, btype);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testH5Tarray_create_invalid() throws Throwable {
+        H5.H5Tarray_create(-1, -1, null);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tarray_create_value_null() throws Throwable {
+        H5.H5Tarray_create(-1, 1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_array_ndims_invalid() throws Throwable {
+        H5.H5Tget_array_ndims(-1);
+    }
+
+    @Test(expected = NullPointerException.class)
+    public void testH5Tget_array_dims_null() throws Throwable {
+        H5.H5Tget_array_dims(-1, null);
+    }
+
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Tget_native_type_invalid() throws Throwable {
+        H5.H5Tget_native_type(-1);
+    }
+
+}
diff --git a/sourceTest/java/test/hdf5lib/TestH5Z.java b/sourceTest/java/test/hdf5lib/TestH5Z.java
new file mode 100644
index 0000000..f64f5a0
--- /dev/null
+++ b/sourceTest/java/test/hdf5lib/TestH5Z.java
@@ -0,0 +1,74 @@
+package test.hdf5lib;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.Test;
+
+public class TestH5Z {
+    
+    @Test
+    public void testH5Zfilter_avail() {
+        try {
+            int filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+            assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+            filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_FLETCHER32);
+            assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+            filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
+            assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+            filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+            assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+            filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+            assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+            /*
+            filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
+            assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+            */
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Zfilter_avail " + err);
+        }
+    }
+    
+    @Test
+    public void testH5Zget_filter_info() {
+        try {
+            int filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+            filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+            filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+            filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+            filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+            /*
+            filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+            assertTrue("H5.H5Zget_filter_info", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+            */
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("H5.H5Zget_filter_info " + err);
+        }
+    }
+    
+    @Test(expected = HDF5LibraryException.class)
+    public void testH5Zunregister_predefined() throws Throwable {
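+        // Predefined filters cannot be unregistered, so this call must fail with an HDF5LibraryException.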
+        int filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+        assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+        
+        H5.H5Zunregister(HDF5Constants.H5Z_FILTER_SHUFFLE);
+    }
+}
diff --git a/sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf b/sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf
new file mode 100644
index 0000000..e462703
Binary files /dev/null and b/sourceTest/java/test/hdf5lib/h5ex_g_iterate.hdf differ
diff --git a/sourceTest/java/tests.xml b/sourceTest/java/tests.xml
new file mode 100644
index 0000000..7ed1e5d
--- /dev/null
+++ b/sourceTest/java/tests.xml
@@ -0,0 +1,12 @@
+<suite name="All" verbose="1">
+    <test name="All">
+        <groups>
+            <run>
+                <exclude name="broken" />
+            </run>
+        </groups>
+        <packages>
+            <package name="ch.systemsx.cisd.hdf5.*" />
+        </packages>
+    </test>
+</suite>

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/libsis-jhdf5-java.git


